diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
deleted file mode 100644
index 88207747a..000000000
--- a/.github/CODEOWNERS
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright 2023 OpenSPG Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
-# in compliance with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License
-# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
-# or implied.
-
-@andylau-55 @leywar @caszkgui @matthewhyx
-
-/cloudext andy.yj@antgroup.com leywar.liang@antgroup.com caszkgui@gmail.com matthew.hyx@antgroup.com
-/reasoner donghai.ydh@antgroup.com chengqiang.cq@antgroup.com peilong.zpl@antgroup.com zhizhen.lzz@antgroup.com wangshaofei.wsf@antgroup.com andy.yj@antgroup.com caszkgui@gmail.com matthew.hyx@antgroup.com
-/builder andy.yj@antgroup.com leywar.liang@antgroup.com caszkgui@gmail.com matthew.hyx@antgroup.com
-/server andy.yj@antgroup.com matthew.hyx@antgroup.com leywar.liang@antgroup.com caszkgui@gmail.com
-
diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml
deleted file mode 100644
index 5a5a5efc0..000000000
--- a/.github/workflows/cla.yml
+++ /dev/null
@@ -1,27 +0,0 @@
-name: "CLA Assistant"
-on:
- issue_comment:
- types: [ created ]
- pull_request_target:
- types: [ opened,closed,synchronize ]
-
-jobs:
- CLAssistant:
- runs-on: ubuntu-latest
- steps:
- - name: "CLA Assistant"
- if: (github.event.comment.body == 'recheck' || github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA') || github.event_name == 'pull_request_target'
- uses: contributor-assistant/github-action@v2.3.0
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- # the below token should have repo scope and must be manually added by you in the repository's secret
- PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
- with:
- path-to-signatures: 'signatures/version1/cla.json'
- path-to-document: 'https://github.com/OpenSPG/cla-assistant/blob/master/CLA.md' # e.g. a CLA or a DCO document
- allowlist: test,bot*
- remote-organization-name: OpenSPG
- remote-repository-name: cla-assistant
- lock-pullrequest-aftermerge: True
-
-
diff --git a/.github/workflows/cloud_code_scan.yml b/.github/workflows/cloud_code_scan.yml
deleted file mode 100644
index a8afaf2c6..000000000
--- a/.github/workflows/cloud_code_scan.yml
+++ /dev/null
@@ -1,22 +0,0 @@
-name: Alipay Cloud Devops Codescan
-on:
- pull_request_target:
-jobs:
- stc:
- runs-on: ubuntu-latest
- steps:
- - name: codeScan
- uses: layotto/alipay-cloud-devops-codescan@main
- with:
- parent_uid: ${{ secrets.ALI_PID }}
- private_key: ${{ secrets.ALI_PK }}
- scan_type: stc
- sca:
- runs-on: ubuntu-latest
- steps:
- - name: codeScan
- uses: layotto/alipay-cloud-devops-codescan@main
- with:
- parent_uid: ${{ secrets.ALI_PID }}
- private_key: ${{ secrets.ALI_PK }}
- scan_type: sca
\ No newline at end of file
diff --git a/.github/workflows/license-checker.yml b/.github/workflows/license-checker.yml
deleted file mode 100644
index 25e2b5b37..000000000
--- a/.github/workflows/license-checker.yml
+++ /dev/null
@@ -1,25 +0,0 @@
-name: License Checker
-
-on:
- push:
- branches:
- - master
- pull_request:
- branches:
- - master
-
-jobs:
- check:
- name: "License Validation"
- runs-on: ubuntu-latest
-
- steps:
- - uses: actions/checkout@v2
- - name: Check License Header
- uses: apache/skywalking-eyes@main
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- with:
- log: info
- - name: Check Dependencies' License
- uses: apache/skywalking-eyes/dependency@main
\ No newline at end of file
diff --git a/.github/workflows/openspg-ci.yml b/.github/workflows/openspg-ci.yml
deleted file mode 100644
index c08f3b7f1..000000000
--- a/.github/workflows/openspg-ci.yml
+++ /dev/null
@@ -1,30 +0,0 @@
-# This workflow will build a Java project with Maven, and cache/restore any dependencies to improve the workflow execution time
-# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-java-with-maven
-
-# This workflow uses actions that are not certified by GitHub.
-# They are provided by a third-party and are governed by
-# separate terms of service, privacy policy, and support
-# documentation.
-
-name: CI
-
-on:
- pull_request:
-
-jobs:
- build:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v3
- - name: Set up JDK 1.8
- uses: actions/setup-java@v3
- with:
- java-version: 8
- distribution: 'temurin'
- cache: maven
- - name: Build with Maven
- run: mvn -B install -DskipTests --file pom.xml
- - name: Upload coverage reports to Codecov
- uses: codecov/codecov-action@v3
- env:
- CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
\ No newline at end of file
diff --git a/.github/workflows/pr-title-checker.yml b/.github/workflows/pr-title-checker.yml
deleted file mode 100644
index ae7befd49..000000000
--- a/.github/workflows/pr-title-checker.yml
+++ /dev/null
@@ -1,28 +0,0 @@
-name: "Lint PR"
-
-on:
- pull_request_target:
- types:
- - opened
- - edited
- - synchronize
-
-jobs:
- main:
- name: Validate PR title
- runs-on: ubuntu-latest
- steps:
- # https://www.conventionalcommits.org/en/v1.0.0/#summary
- - uses: amannn/action-semantic-pull-request@v5
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- with:
- requireScope: true
- subjectPattern: ^(?![A-Z]).+$
- # If `subjectPattern` is configured, you can use this property to override
- # the default error message that is shown when the pattern doesn't match.
- # The variables `subject` and `title` can be used within the message.
- subjectPatternError: |
- The subject "{subject}" found in the pull request title "{title}"
- didn't match the configured pattern. Please ensure that the subject
- doesn't start with an uppercase character.
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 6de61720e..3ef7c9a23 100644
--- a/.gitignore
+++ b/.gitignore
@@ -460,3 +460,11 @@ hs_err_pid*
/logs/
**/spotless-index-file
**/pom.xml.versionsBackup
+
+.java-version
+.vscode/
+
+.classpath
+.factorypath
+.project
+.settings/
\ No newline at end of file
diff --git a/AGENTS.md b/AGENTS.md
new file mode 100644
index 000000000..ec57c0cb5
--- /dev/null
+++ b/AGENTS.md
@@ -0,0 +1,218 @@
+# AGENTS.md
+
+This file provides guidance to AI coding agents (e.g. Claude Code) when working with code in this repository.
+
+## Project Overview
+
+OpenSPG is a knowledge graph engine developed by Ant Group in collaboration with OpenKG, based on the SPG (Semantic-enhanced Programmable Graph) framework. It combines LPG (Labeled Property Graph) structural simplicity with RDF semantic expressiveness for industrial knowledge graph construction and reasoning.
+
+OpenSPG Core Capabilities:
+- **SPG-Schema**: Semantic modeling with schema framework for property graphs
+- **SPG-Builder**: Knowledge construction from structured/unstructured data
+- **SPG-Reasoner**: Logical rule reasoning with KGDSL (Knowledge Graph Domain Specific Language)
+- **KNext**: Programmable framework for knowledge graph solutions
+- **Cloudext**: Cloud adaptation layer for pluggable graph storage and computing engines
+
+## Project Information
+
+- **License**: Apache License 2.0
+- **Java Version**: 17
+- **Scala Version**: 2.11.12
+- **Main Repository**: https://github.com/OpenSPG/OpenSPG
+- **Documentation**: https://openspg.github.io/v2
+
+## Build and Development Commands
+
+### Building the Project
+
+```bash
+# Full build (compiles Java, Scala, and runs tests)
+mvn clean install
+
+# Build without tests
+mvn clean install -DskipTests
+
+# Format code (Spotless for Java, Scalastyle for Scala)
+mvn spotless:apply
+
+# Check code formatting
+mvn spotless:check
+```
+
+### Running Tests
+
+```bash
+# Run all tests
+mvn test
+
+# Run a specific test class
+mvn test -Dtest=ClassName
+
+# Run tests in a specific module
+mvn test -pl module-name
+```
+
+### Local Development with Docker Compose
+
+```bash
+# Start all services (MySQL, Neo4j, MinIO, OpenSPG Server)
+cd dev/release
+./docker-compose.sh up
+
+# Stop services
+./docker-compose.sh down
+
+# Alternative: Use docker-compose directly
+docker-compose -f docker-compose.yml up -d
+docker-compose -f docker-compose.yml down
+```
+
+**Service Ports:**
+- OpenSPG Server: 8887
+- Neo4j HTTP: 7474
+- Neo4j Bolt: 7687
+- MySQL: 3306
+- MinIO API: 9000
+- MinIO Console: 9001
+
+### Running Standalone Components
+
+```bash
+# Local Builder Runner
+java -jar builder/runner/local/target/builder-runner-local-*.jar
+
+# Local Reasoner Runner
+java -jar reasoner/runner/local-runner/target/reasoner-local-runner-*.jar
+```
+
+### Building Docker Images
+
+The OpenSPG server Docker image uses a multi-stage build that automatically builds the JAR from source. The build context must be the project root directory.
+
+```bash
+# Build the server Docker image (from project root)
+cd dev/release/server
+./buildx-release-server.sh
+```
+
+**Important Notes:**
+- The Dockerfile uses a multi-stage build: first stage compiles the JAR, second stage creates the runtime image
+- The build context must be the project root (`.`), not the `dev/release/server` directory
+- The JAR file is built automatically during the Docker build - no need to build it separately first
+- If compilation errors occur, they will be visible during the Docker build process
+
+## Architecture
+
+### Module Structure
+
+**server/** - Main HTTP server built with SOFABoot (Alibaba's Spring Boot extension)
+- Entry point: `server/arks/sofaboot/src/main/java/com/antgroup/openspg/server/arks/sofaboot/Application.java`
+- `api/` - REST API layer (facade, http-client, http-server)
+- `biz/` - Business logic (common, schema, service)
+- `core/` - Core services (schema, scheduler, reasoner models and services)
+- `infra/` - Data access layer (MyBatis DAOs)
+- Configuration: `server/arks/sofaboot/src/main/resources/config/application-*.properties`
+
+**reasoner/** - Logical rule reasoning engine (Scala-based)
+- `lube-api/` - Core abstractions (block, catalog, parser, utils)
+- `lube-logical/` - Logical plan layer
+- `lube-physical/` - Physical plan layer
+- `kgdsl-parser/` - ANTLR4-based KGDSL parser
+- `runner/` - Execution infrastructure (local-runner, runner-common)
+- `catalog/openspg-catalog/` - SPG catalog implementation
+- `udf/` - User-defined functions
+- `warehouse/` - Data warehouse implementations (cloudext-warehouse, warehouse-common)
+
+**builder/** - Knowledge construction pipeline (Java-based)
+- `core/logical/` - Logical plan nodes (LLMBasedExtractNode, RelationMappingNode, SPGTypeMappingNode)
+- `core/physical/` - Physical plan and operators
+- `core/reason/` - Reasoning processors
+- `core/runtime/` - Runtime execution
+- `core/strategy/` - Entity fusing and property linking strategies
+- `model/` - Data models and interfaces
+- `runner/local/` - Local execution runner
+- `testdata/` - Test data utilities
+
+**cloudext/** - Cloud adaptation layer with pluggable interfaces
+- `interface/` - Abstract interfaces for graph-store, search-engine, cache, object-storage, computing-engine
+- `impl/` - Implementations:
+- `graph-store/` - TuGraph, Neo4j
+- `search-engine/` - Elasticsearch, Neo4j
+- `cache/` - Redis
+- `object-storage/` - MinIO, OSS
+
+**common/** - Shared utilities
+- `util/` - Common utility classes
+
+### Key Architectural Patterns
+
+**Logical/Physical Plan Pattern**: Both builder and reasoner use a two-layer optimization approach. Logical plans define what to compute, physical plans define how to compute it (with optimizations).
+
+**Cloudext Plugin System**: External dependencies are abstracted behind interfaces. To add a new graph store, implement `cloudext-interface-graph-store` and add to `impl/graph-store/`.
+
+**Catalog Pattern**: Schema and metadata management is centralized in catalog implementations (`openspg-catalog` for reasoner).
+
+### Technology Stack
+
+- **Languages**: Java 17, Scala 2.11.12
+- **Build**: Maven multi-module
+- **Web Framework**: SOFABoot 3.17.0 (extends Spring Boot 2.7.8)
+- **Data Access**: MyBatis 3.5.2, MySQL 5.1.30
+- **Graph Storage**: TuGraph 1.4.1, Neo4j 4.4.7
+- **Search**: Elasticsearch (via cloudext)
+- **Scala Libraries**: Cats 2.0.0 (functional), Json4s 4.0.6
+- **Language Processing**: ANTLR4 4.8 (KGDSL), QLExpress 3.3.2 (expressions)
+- **Other**: Lombok, Guava, Jackson, Quartz (scheduling), OpenAPI/Swagger
+- **Testing**: JUnit, Spock (Groovy), ScalaTest
+- **Code Quality**: Spotless (Java), Scalastyle (Scala), JaCoCo (coverage)
+
+### Entry Points
+
+1. **Main Server**: `com.antgroup.openspg.server.arks.sofaboot.Application`
+2. **Local Builder**: `com.antgroup.openspg.builder.runner.local.LocalBuilderMain`
+3. **Local Reasoner**: `com.antgroup.openspg.reasoner.runner.local.LocalReasonerMain`
+
+## Development Workflow
+
+### Working with Modules
+
+OpenSPG uses a multi-module Maven structure. You can work on specific modules:
+
+```bash
+# Build only the server module
+mvn clean install -pl server
+
+# Build server and its dependencies
+mvn clean install -pl server -am
+
+# Test only the reasoner module
+mvn test -pl reasoner
+```
+
+### Code Style and Quality
+
+```bash
+# Check code formatting before committing
+mvn spotless:check
+
+# Auto-format code
+mvn spotless:apply
+
+# Run tests with coverage
+mvn clean test jacoco:report
+```
+
+### IDE Configuration
+
+For IntelliJ IDEA:
+1. Enable annotation processing (for Lombok)
+2. Install Scala plugin for Scala modules
+3. Set Java 17 as project SDK
+4. Import as Maven project
+
+### Common Issues
+
+- **Scala Version**: The project uses Scala 2.11.12 - ensure compatibility
+- **Java Version**: Requires Java 17 - verify JAVA_HOME setting
+- **Memory**: Builds may require increased heap space: `export MAVEN_OPTS="-Xmx4g"`
+
diff --git a/common/util/src/main/java/com/antgroup/openspg/common/util/pemja/PythonInvokeMethod.java b/common/util/src/main/java/com/antgroup/openspg/common/util/pemja/PythonInvokeMethod.java
index e9672b6cf..812b309cc 100644
--- a/common/util/src/main/java/com/antgroup/openspg/common/util/pemja/PythonInvokeMethod.java
+++ b/common/util/src/main/java/com/antgroup/openspg/common/util/pemja/PythonInvokeMethod.java
@@ -14,20 +14,20 @@
package com.antgroup.openspg.common.util.pemja;
public enum PythonInvokeMethod {
- BRIDGE_READER("bridge.spg_server_bridge", "SPGServerBridge", "run_reader", ""),
- BRIDGE_SCANNER("bridge.spg_server_bridge", "SPGServerBridge", "run_scanner", ""),
- BRIDGE_COMPONENT("bridge.spg_server_bridge", "SPGServerBridge", "run_component", ""),
+ BRIDGE_READER("kag.bridge.spg_server_bridge", "SPGServerBridge", "run_reader", ""),
+ BRIDGE_SCANNER("kag.bridge.spg_server_bridge", "SPGServerBridge", "run_scanner", ""),
+ BRIDGE_COMPONENT("kag.bridge.spg_server_bridge", "SPGServerBridge", "run_component", ""),
BRIDGE_GET_LLM_TOKEN_INFO(
- "bridge.spg_server_bridge", "SPGServerBridge", "get_llm_token_info", ""),
+ "kag.bridge.spg_server_bridge", "SPGServerBridge", "get_llm_token_info", ""),
BRIDGE_GET_INDEX_MANAGER_NAMES(
- "bridge.spg_server_bridge", "SPGServerBridge", "get_index_manager_names", ""),
+ "kag.bridge.spg_server_bridge", "SPGServerBridge", "get_index_manager_names", ""),
BRIDGE_GET_INDEX_MANAGER_INFO(
- "bridge.spg_server_bridge", "SPGServerBridge", "get_index_manager_info", ""),
- BRIDGE_LLM_CHECKER("bridge.spg_server_bridge", "SPGServerBridge", "run_llm_config_check", ""),
+ "kag.bridge.spg_server_bridge", "SPGServerBridge", "get_index_manager_info", ""),
+ BRIDGE_LLM_CHECKER("kag.bridge.spg_server_bridge", "SPGServerBridge", "run_llm_config_check", ""),
BRIDGE_VECTORIZER_CHECKER(
- "bridge.spg_server_bridge", "SPGServerBridge", "run_vectorizer_config_check", ""),
- BRIDGE_SOLVER_MAIN("bridge.spg_server_bridge", "SPGServerBridge", "run_solver", ""),
- BRIDGE_BUILDER_MAIN("bridge.spg_server_bridge", "SPGServerBridge", "run_builder", "");
+ "kag.bridge.spg_server_bridge", "SPGServerBridge", "run_vectorizer_config_check", ""),
+ BRIDGE_SOLVER_MAIN("kag.bridge.spg_server_bridge", "SPGServerBridge", "run_solver", ""),
+ BRIDGE_BUILDER_MAIN("kag.bridge.spg_server_bridge", "SPGServerBridge", "run_builder", "");
String modulePath;
diff --git a/dev/release/docker-compose.sh b/dev/release/docker-compose.sh
old mode 100644
new mode 100755
diff --git a/dev/release/docker-compose.yml b/dev/release/docker-compose.yml
index 2488ae4b2..a3a53d166 100644
--- a/dev/release/docker-compose.yml
+++ b/dev/release/docker-compose.yml
@@ -2,7 +2,10 @@ version: "3.7"
services:
server:
restart: always
- image: spg-registry.cn-hangzhou.cr.aliyuncs.com/spg/openspg-server:latest
+ build:
+ context: ./server
+ dockerfile: Dockerfile
+ image: registry.cn-hangzhou.aliyuncs.com/lacogito/openspg-server:latest
container_name: release-openspg-server
ports:
- "8887:8887"
@@ -12,6 +15,7 @@ services:
- minio
volumes:
- /etc/localtime:/etc/localtime:ro
+ - ./kag_config.yaml:/app/kag_config.yaml:ro
environment:
TZ: Asia/Shanghai
LANG: C.UTF-8
@@ -25,8 +29,8 @@ services:
'--server.repository.impl.jdbc.host=mysql',
'--server.repository.impl.jdbc.password=openspg',
'--builder.model.execute.num=20',
- '--cloudext.graphstore.url=neo4j://release-openspg-neo4j:7687?user=neo4j&password=neo4j@openspg&database=neo4j',
- '--cloudext.searchengine.url=neo4j://release-openspg-neo4j:7687?user=neo4j&password=neo4j@openspg&database=neo4j'
+ '--cloudext.graphstore.url=neo4j://neo4j:7687?user=neo4j&password=neo4j@openspg&database=neo4j',
+ '--cloudext.searchengine.url=neo4j://neo4j:7687?user=neo4j&password=neo4j@openspg&database=neo4j'
]
mysql:
@@ -67,7 +71,7 @@ services:
- NEO4J_dbms_security_procedures_allowlist=*
volumes:
- /etc/localtime:/etc/localtime:ro
- - $HOME/dozerdb/logs:/logs
+ - /tmp/openspg-neo4j/logs:/logs
minio:
image: spg-registry.cn-hangzhou.cr.aliyuncs.com/spg/openspg-minio:latest
diff --git a/dev/release/kag_config_example.yaml b/dev/release/kag_config_example.yaml
new file mode 100644
index 000000000..80527c48e
--- /dev/null
+++ b/dev/release/kag_config_example.yaml
@@ -0,0 +1,180 @@
+#------------project configuration start----------------#
+openie_llm: &openie_llm
+ type: maas
+ base_url: https://dashscope.aliyuncs.com/compatible-mode/v1/
+ api_key: key
+ model: qwen2.5-7b-instruct-1m
+ enable_check: false
+
+chat_llm: &chat_llm
+ type: maas
+ base_url: https://dashscope.aliyuncs.com/compatible-mode/v1/
+ api_key: key
+ model: qwen2.5-72b-instruct
+ enable_check: false
+
+vectorize_model: &vectorize_model
+ api_key: key
+ base_url: https://api.siliconflow.cn/v1/
+ model: BAAI/bge-m3
+ type: openai
+ vector_dimensions: 1024
+ enable_check: false
+vectorizer: *vectorize_model
+
+log:
+ level: INFO
+
+project:
+ biz_scene: default
+ host_addr: http://127.0.0.1:8887
+ id: "1"
+ language: en
+ namespace: HotpotQATest
+#------------project configuration end----------------#
+
+#------------kag-builder configuration start----------------#
+kag_builder_pipeline:
+ chain:
+ type: unstructured_builder_chain # kag.builder.default_chain.DefaultUnstructuredBuilderChain
+ extractor:
+ type: knowledge_unit_extractor
+ llm: *openie_llm
+ ner_prompt:
+ type: knowledge_unit_ner
+ triple_prompt:
+ type: knowledge_unit_triple
+ kn_prompt:
+ type: knowledge_unit
+ reader:
+ type: dict_reader # kag.builder.component.reader.dict_reader.DictReader
+ post_processor:
+ type: kag_post_processor # kag.builder.component.postprocessor.kag_postprocessor.KAGPostProcessor
+ splitter:
+ type: length_splitter # kag.builder.component.splitter.length_splitter.LengthSplitter
+ split_length: 100000
+ window_length: 0
+ vectorizer:
+ type: batch_vectorizer # kag.builder.component.vectorizer.batch_vectorizer.BatchVectorizer
+ vectorize_model: *vectorize_model
+ writer:
+ type: kg_writer # kag.builder.component.writer.kg_writer.KGWriter
+ num_threads_per_chain: 1
+ num_chains: 16
+ scanner:
+ type: hotpotqa_dataset_scanner # kag.builder.component.scanner.dataset_scanner.HotpotqaCorpusScanner
+#------------kag-builder configuration end----------------#
+
+#------------kag-solver configuration start----------------#
+search_api: &search_api
+ type: openspg_search_api #kag.solver.tools.search_api.impl.openspg_search_api.OpenSPGSearchAPI
+
+graph_api: &graph_api
+ type: openspg_graph_api #kag.solver.tools.graph_api.impl.openspg_graph_api.OpenSPGGraphApi
+
+
+kg_cs: &kg_cs
+ type: kg_cs_open_spg
+ priority: 0
+ path_select:
+ type: exact_one_hop_select
+ graph_api: *graph_api
+ search_api: *search_api
+ entity_linking:
+ type: entity_linking
+ graph_api: *graph_api
+ search_api: *search_api
+ recognition_threshold: 0.9
+ exclude_types:
+ - Chunk
+ - AtomicQuery
+ - KnowledgeUnit
+ - Summary
+ - Outline
+ - Doc
+
+kg_fr: &kg_fr
+ type: kg_fr_knowledge_unit
+ top_k: 20
+ graph_api: *graph_api
+ search_api: *search_api
+ vectorize_model: *vectorize_model
+ path_select:
+ type: fuzzy_one_hop_select
+ llm_client: *openie_llm
+ graph_api: *graph_api
+ search_api: *search_api
+ ppr_chunk_retriever_tool:
+ type: ppr_chunk_retriever
+ llm_client: *chat_llm
+ graph_api: *graph_api
+ search_api: *search_api
+ entity_linking:
+ type: entity_linking
+ graph_api: *graph_api
+ search_api: *search_api
+ recognition_threshold: 0.8
+ exclude_types:
+ - Chunk
+ - AtomicQuery
+ - KnowledgeUnit
+ - Summary
+ - Outline
+ - Doc
+
+rc: &rc
+ type: rc_open_spg
+ vector_chunk_retriever:
+ type: vector_chunk_retriever
+ vectorize_model: *vectorize_model
+ score_threshold: 0.65
+ search_api: *search_api
+ graph_api: *graph_api
+ search_api: *search_api
+ vectorize_model: *vectorize_model
+ top_k: 20
+
+kag_hybrid_executor: &kag_hybrid_executor_conf
+ type: kag_hybrid_retrieval_executor
+ retrievers:
+ - *kg_cs
+ - *kg_fr
+ - *rc
+ merger:
+ type: kag_merger
+ enable_summary: true
+
+kag_output_executor: &kag_output_executor_conf
+ type: kag_output_executor
+ llm_module: *chat_llm
+
+kag_deduce_executor: &kag_deduce_executor_conf
+ type: kag_deduce_executor
+ llm_module: *chat_llm
+
+py_code_based_math_executor: &py_code_based_math_executor_conf
+ type: py_code_based_math_executor
+ llm: *chat_llm
+
+kag_solver_pipeline:
+ type: kag_static_pipeline
+ planner:
+ type: lf_kag_static_planner
+ llm: *chat_llm
+ plan_prompt:
+ type: default_lf_static_planning
+ rewrite_prompt:
+ type: default_rewrite_sub_task_query
+ executors:
+ - *kag_hybrid_executor_conf
+ - *py_code_based_math_executor_conf
+ - *kag_deduce_executor_conf
+ - *kag_output_executor_conf
+ generator:
+ type: default_generator # kag.solver.implementation.default_generator.DefaultGenerator
+ llm_client: *chat_llm
+ generated_prompt:
+ type: default_refer_generator_prompt
+ enable_ref: true
+
+#------------kag-solver configuration end----------------#
diff --git a/dev/release/python/Dockerfile b/dev/release/python/Dockerfile
index 1220b9f1c..0c69d579e 100644
--- a/dev/release/python/Dockerfile
+++ b/dev/release/python/Dockerfile
@@ -9,44 +9,57 @@
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied.
-FROM docker.m.daocloud.io/ubuntu:20.04
+# Image for building OpenSPG Python releases. Based on the same base image as the server.
+#
+# Includes:
+# * Java 17 (from base image)
+# * Conda with Python environment
+# * Python packages: openspg-kag, pemja
+
+FROM registry.cn-hangzhou.aliyuncs.com/lacogito/conda-forge:ubuntu2204
+
+# Replace apt sources with Aliyun mirror for faster downloads
+RUN sed -i 's/archive.ubuntu.com/mirrors.aliyun.com/g' /etc/apt/sources.list && \
+ sed -i 's/security.ubuntu.com/mirrors.aliyun.com/g' /etc/apt/sources.list
# For apt to be noninteractive
-ENV DEBIAN_FRONTEND noninteractive
-ENV DEBCONF_NONINTERACTIVE_SEEN true
-ENV LANG C.UTF-8
-ENV PATH=/home/admin/miniconda3/bin:$PATH
+ENV DEBIAN_FRONTEND=noninteractive
+ENV DEBCONF_NONINTERACTIVE_SEEN=true
+ENV LANG=C.UTF-8
ARG TARGETPLATFORM
ARG APT_INSTALL="apt-get install --no-install-recommends -y"
-ARG MINICONDA_FILE
RUN apt-get clean && apt-get update && \
$APT_INSTALL ca-certificates && \
$APT_INSTALL ca-certificates-java && \
$APT_INSTALL git less vim wget curl telnet nano jq procps net-tools unzip zip tar gzip nfs-common && \
- # Install openjdk 8.
- $APT_INSTALL openjdk-8-jdk && \
- update-alternatives --set java /usr/lib/jvm/java-8-openjdk-$(dpkg --print-architecture)/jre/bin/java && \
- # Install build / source control tools
- $APT_INSTALL wget git maven less vim
-
-RUN if [ "${TARGETPLATFORM}" = "linux/amd64" ]; then \
- export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64; \
- else \
- export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-arm64; \
- $APT_INSTALL gcc-aarch64-linux-gnu; \
- $APT_INSTALL build-essential libpython3.8-dev; \
- fi && \
+ $APT_INSTALL openjdk-17-jre && \
+ apt-get clean
+
+# Configure JAVA_HOME for the OpenJDK 17 runtime installed by apt above
+RUN JAVA_HOME=$(find /usr/lib/jvm/java-17-openjdk-* -maxdepth 0 -type d | head -1) && \
+ update-alternatives --set java "$JAVA_HOME/bin/java" && \
echo "export JAVA_HOME=${JAVA_HOME}" >> /etc/profile
-RUN . /etc/profile && echo ${JAVA_HOME} && mkdir -p /home/admin/ && chmod -R 777 /home/admin &&\
- # wget -q http://hyperloop.cn-hangzhou.alipay.aliyun-inc.com/kgrl/user/zhongshu.zzs/tmp/Miniconda3-py310_25.1.1-2-Linux-x86_64.sh && \
- wget -q http://hyperloop.cn-hangzhou.alipay.aliyun-inc.com/kgrl/user/zhongshu.zzs/tmp/$MINICONDA_FILE && \
- ln -s /usr/bin/md5sum /usr/bin/md5 &&\
- bash $MINICONDA_FILE -b -u -p /home/admin/miniconda3 &&\
- rm -rf $MINICONDA_FILE && \
- /home/admin/miniconda3/bin/conda init &&\
- pip install openspg-kag==0.7.0 &&\
- pip install pemja==0.4.0 && \
- pip cache purge
+# Install additional build tools for ARM64 if needed
+RUN if [ "${TARGETPLATFORM}" = "linux/arm64" ]; then \
+ $APT_INSTALL gcc-aarch64-linux-gnu build-essential libpython3-dev; \
+ fi
+
+# Create a new environment named 'app_env' with Python 3.10 (matching server Dockerfile)
+RUN conda create -n app_env python=3.10 -y && conda clean -afy
+
+# Ensure the environment is activated by default in the shell
+RUN echo "conda activate app_env" >> ~/.bashrc
+ENV CONDA_DEFAULT_ENV=app_env
+ENV PATH=$CONDA_DIR/envs/app_env/bin:$PATH
+
+# Create admin user directory and install Python packages
+RUN mkdir -p /home/admin/ && chmod -R 777 /home/admin && \
+ ln -s /usr/bin/md5sum /usr/bin/md5 && \
+ conda run -n app_env pip install --no-cache-dir openspg-kag==0.7.0 && \
+ conda run -n app_env pip install --no-cache-dir pemja==0.4.0 && \
+ conda clean -afy && \
+ find $CONDA_DIR -follow -type f -name '*.a' -delete && \
+ find $CONDA_DIR -follow -type f -name '*.pyc' -delete
diff --git a/dev/release/server/Dockerfile b/dev/release/server/Dockerfile
index beab576b1..19aed97a9 100644
--- a/dev/release/server/Dockerfile
+++ b/dev/release/server/Dockerfile
@@ -12,25 +12,47 @@
# Image for building OpenSPG releases. Based on Ubuntu 20.04.
#
# Includes:
-# * Java 8
+# * Java 17
-FROM ubuntu:20.04
+# Base image with conda installed at /opt/conda
+FROM registry.cn-hangzhou.aliyuncs.com/lacogito/conda-forge:ubuntu2204
+
+# Replace apt sources with Aliyun mirror for faster downloads
+RUN sed -i 's/archive.ubuntu.com/mirrors.aliyun.com/g' /etc/apt/sources.list && \
+ sed -i 's/security.ubuntu.com/mirrors.aliyun.com/g' /etc/apt/sources.list
# For apt to be noninteractive
-ENV DEBIAN_FRONTEND noninteractive
-ENV DEBCONF_NONINTERACTIVE_SEEN true
-ENV LANG C.UTF-8
+ENV DEBIAN_FRONTEND=noninteractive
+ENV DEBCONF_NONINTERACTIVE_SEEN=true
+ENV LANG=C.UTF-8
-ARG TARGETPLATFORM
ARG APT_INSTALL="apt-get install --no-install-recommends -y"
RUN apt-get clean && apt-get update && \
- $APT_INSTALL git less vim && \
- # Install openjdk 8.
- $APT_INSTALL openjdk-8-jdk && \
- update-alternatives --set java /usr/lib/jvm/java-8-openjdk-`echo $TARGETPLATFORM | cut -d'/' -f2`/jre/bin/java
+ $APT_INSTALL git less vim wget && \
+ $APT_INSTALL openjdk-17-jre && \
+ apt-get clean
+
+RUN JAVA_HOME=$(find /usr/lib/jvm/java-17-openjdk-* -maxdepth 0 -type d | head -1) && \
+ update-alternatives --set java "$JAVA_HOME/bin/java" && \
+ echo "export JAVA_HOME=${JAVA_HOME}" >> /etc/profile
+
+# Create a new environment named 'app_env' with Python 3.10
+RUN conda create -n app_env python=3.10 -y && conda clean -afy
+
+# Ensure the environment is activated by default in the shell
+RUN echo "conda activate app_env" >> ~/.bashrc
+ENV CONDA_DEFAULT_ENV=app_env
+
+# Install pemja using the conda environment with Python 3.10
+RUN conda run -n app_env pip install --no-cache-dir -i https://mirrors.aliyun.com/pypi/simple/ pemja==0.4.0 && \
+    conda run -n app_env pip install --no-cache-dir -i https://mirrors.aliyun.com/pypi/simple/ "openspg-kag>=0.8.0" && \
+ conda clean -afy && \
+ find $CONDA_DIR -follow -type f -name '*.a' -delete && \
+ find $CONDA_DIR -follow -type f -name '*.pyc' -delete
+WORKDIR /app
EXPOSE 8887
-ADD target/arks-sofaboot-0.0.1-SNAPSHOT-executable.jar /
-ENTRYPOINT ["java", "-jar", "arks-sofaboot-0.0.1-SNAPSHOT-executable.jar"]
+COPY target/arks-sofaboot-0.0.1-SNAPSHOT-executable.jar /app/
+ENTRYPOINT ["java", "-jar", "/app/arks-sofaboot-0.0.1-SNAPSHOT-executable.jar"]
diff --git a/dev/release/server/buildx-release-server.sh b/dev/release/server/buildx-release-server.sh
old mode 100644
new mode 100755
index 859256d7a..e7de3d7ab
--- a/dev/release/server/buildx-release-server.sh
+++ b/dev/release/server/buildx-release-server.sh
@@ -1,3 +1,4 @@
+#!/bin/bash
# Copyright 2023 OpenSPG Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
@@ -10,6 +11,6 @@
# or implied.
docker buildx build -f Dockerfile --platform linux/arm64/v8,linux/amd64 --push \
- -t spg-registry.cn-hangzhou.cr.aliyuncs.com/spg/openspg-server:0.7 \
- -t spg-registry.cn-hangzhou.cr.aliyuncs.com/spg/openspg-server:latest \
+ -t registry.cn-hangzhou.aliyuncs.com/lacogito/openspg-server:0.8 \
+ -t registry.cn-hangzhou.aliyuncs.com/lacogito/openspg-server:latest \
.
diff --git a/pom.xml b/pom.xml
index 26f1e290a..0344bcc77 100644
--- a/pom.xml
+++ b/pom.xml
@@ -43,7 +43,7 @@
3.0.9
2.10.1
2.13.4
- 1.8
+ 17
4.0.6
20231018-OPENSOURCE
1.2.11
diff --git a/reasoner/catalog/openspg-catalog/pom.xml b/reasoner/catalog/openspg-catalog/pom.xml
index d325d8829..41c6a5daf 100644
--- a/reasoner/catalog/openspg-catalog/pom.xml
+++ b/reasoner/catalog/openspg-catalog/pom.xml
@@ -24,8 +24,8 @@
reasoner-openspg-catalog
- 8
- 8
+ 17
+ 17
diff --git a/reasoner/common/pom.xml b/reasoner/common/pom.xml
index 9d9d035a2..f8152449e 100644
--- a/reasoner/common/pom.xml
+++ b/reasoner/common/pom.xml
@@ -24,8 +24,8 @@
reasoner-common
- 8
- 8
+ 17
+ 17
diff --git a/reasoner/kgdsl-parser/pom.xml b/reasoner/kgdsl-parser/pom.xml
index af617c431..c4846bbe1 100644
--- a/reasoner/kgdsl-parser/pom.xml
+++ b/reasoner/kgdsl-parser/pom.xml
@@ -24,8 +24,8 @@
reasoner-kgdsl-parser
- 8
- 8
+ 17
+ 17
diff --git a/reasoner/lube-api/pom.xml b/reasoner/lube-api/pom.xml
index 5377885b0..a79e7373d 100644
--- a/reasoner/lube-api/pom.xml
+++ b/reasoner/lube-api/pom.xml
@@ -24,8 +24,8 @@
reasoner-lube-api
- 8
- 8
+ 17
+ 17
diff --git a/reasoner/lube-logical/pom.xml b/reasoner/lube-logical/pom.xml
index 5603aff3e..3ba6f9295 100644
--- a/reasoner/lube-logical/pom.xml
+++ b/reasoner/lube-logical/pom.xml
@@ -24,8 +24,8 @@
reasoner-lube-logical
- 8
- 8
+ 17
+ 17
diff --git a/reasoner/pom.xml b/reasoner/pom.xml
index ab97c3f72..61788683d 100644
--- a/reasoner/pom.xml
+++ b/reasoner/pom.xml
@@ -45,11 +45,11 @@
2.0.0
2.7.2
3.1.0
- 1.8
+ 17
4.0.6
0.1.26_20230919
- 8
- 8
+ 17
+ 17
0.37.9-public
1.10.0
UTF-8
diff --git a/reasoner/runner/local-runner/pom.xml b/reasoner/runner/local-runner/pom.xml
index 51fac9a5e..edab54a07 100644
--- a/reasoner/runner/local-runner/pom.xml
+++ b/reasoner/runner/local-runner/pom.xml
@@ -24,8 +24,8 @@
reasoner-local-runner
- 8
- 8
+ 17
+ 17
diff --git a/reasoner/runner/runner-common/pom.xml b/reasoner/runner/runner-common/pom.xml
index 5e57d4015..1bab5a569 100644
--- a/reasoner/runner/runner-common/pom.xml
+++ b/reasoner/runner/runner-common/pom.xml
@@ -24,8 +24,8 @@
reasoner-runner-common
- 8
- 8
+ 17
+ 17
diff --git a/reasoner/udf/pom.xml b/reasoner/udf/pom.xml
index c9b18f8b6..4dbcd228d 100644
--- a/reasoner/udf/pom.xml
+++ b/reasoner/udf/pom.xml
@@ -23,8 +23,8 @@
reasoner-udf
- 8
- 8
+ 17
+ 17
diff --git a/server/api/http-server/src/main/java/com/antgroup/openspg/server/api/http/server/OpenApiConfig.java b/server/api/http-server/src/main/java/com/antgroup/openspg/server/api/http/server/OpenApiConfig.java
new file mode 100644
index 000000000..8c40544d2
--- /dev/null
+++ b/server/api/http-server/src/main/java/com/antgroup/openspg/server/api/http/server/OpenApiConfig.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2023 OpenSPG Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
+ * in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied.
+ */
+
+package com.antgroup.openspg.server.api.http.server;
+
+import io.swagger.v3.oas.models.OpenAPI;
+import io.swagger.v3.oas.models.info.Info;
+import io.swagger.v3.oas.models.info.License;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+@Configuration
+public class OpenApiConfig {
+
+ @Bean
+ public OpenAPI customOpenAPI() {
+ return new OpenAPI()
+ .info(
+ new Info()
+ .title("OpenSPG API")
+ .version("0.8.0")
+ .description(
+ "OpenSPG - Semantic-enhanced Programmable Graph framework API documentation")
+ .license(
+ new License()
+ .name("Apache 2.0")
+ .url("http://www.apache.org/licenses/LICENSE-2.0.html")));
+ }
+}
diff --git a/server/api/http-server/src/main/java/com/antgroup/openspg/server/api/http/server/WebMvcConfig.java b/server/api/http-server/src/main/java/com/antgroup/openspg/server/api/http/server/WebMvcConfig.java
index e8584840b..d55ed2ece 100644
--- a/server/api/http-server/src/main/java/com/antgroup/openspg/server/api/http/server/WebMvcConfig.java
+++ b/server/api/http-server/src/main/java/com/antgroup/openspg/server/api/http/server/WebMvcConfig.java
@@ -31,6 +31,7 @@ public void extendMessageConverters(List> converters) {
gsonHttpMessageConverter.setGson(SchemaJsonUtils.gson);
gsonHttpMessageConverter.setSupportedMediaTypes(
Collections.singletonList(MediaType.APPLICATION_JSON));
- converters.add(0, gsonHttpMessageConverter);
+ // Add Gson converter at the end, not at the beginning, to allow OpenAPI to use Jackson
+ converters.add(converters.size(), gsonHttpMessageConverter);
}
}
diff --git a/server/arks/sofaboot/src/main/java/com/antgroup/openspg/server/arks/sofaboot/Application.java b/server/arks/sofaboot/src/main/java/com/antgroup/openspg/server/arks/sofaboot/Application.java
index f175ea88a..5a76cda7e 100644
--- a/server/arks/sofaboot/src/main/java/com/antgroup/openspg/server/arks/sofaboot/Application.java
+++ b/server/arks/sofaboot/src/main/java/com/antgroup/openspg/server/arks/sofaboot/Application.java
@@ -29,7 +29,12 @@
@SpringBootApplication
@ComponentScan(basePackages = "com.antgroup.openspg")
@ImportResource({"classpath*:spring/*.xml"})
-@PropertySource(value = "classpath:config/application-default.properties")
+@PropertySource(
+ value = {
+ "classpath:config/application-default.properties",
+ "classpath:config/application-${env}.properties"
+ },
+ ignoreResourceNotFound = true)
@EnableScheduling
public class Application {
diff --git a/server/arks/sofaboot/src/main/resources/config/application-default.properties b/server/arks/sofaboot/src/main/resources/config/application-default.properties
index 25e475031..e9065a429 100644
--- a/server/arks/sofaboot/src/main/resources/config/application-default.properties
+++ b/server/arks/sofaboot/src/main/resources/config/application-default.properties
@@ -51,8 +51,8 @@ jasypt.encryptor.password=openspg
cloudext.graphstore.url=neo4j://release-openspg-neo4j:7687?user=neo4j&password=neo4j@openspg&database=neo4j
cloudext.searchengine.url=neo4j://release-openspg-neo4j:7687?user=neo4j&password=neo4j@openspg&database=neo4j
-python.exec=/home/admin/miniconda3/bin/python
-python.paths=/home/admin/miniconda3/lib/python3.10/site-packages/
+python.exec=/opt/conda/envs/app_env/bin/python
+python.paths=/opt/conda/envs/app_env/lib/python3.10/site-packages/
# Scheduler
scheduler.handler.type=local
diff --git a/server/arks/sofaboot/src/main/resources/config/application-local.properties b/server/arks/sofaboot/src/main/resources/config/application-local.properties
new file mode 100644
index 000000000..af6f88a00
--- /dev/null
+++ b/server/arks/sofaboot/src/main/resources/config/application-local.properties
@@ -0,0 +1,58 @@
+#
+# Copyright 2023 OpenSPG Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
+# in compliance with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License
+# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+# or implied.
+#
+
+# Local development configuration for VS Code
+
+# Environment
+env=local
+
+# Server
+server.port=8887
+spring.application.name=openspg
+spring.servlet.multipart.max-file-size=100GB
+spring.servlet.multipart.max-request-size=100GB
+spring.main.allow-circular-references=true
+
+# Logging
+logging.level.com.alipay.sofa=info
+logging.level.com.antgroup.openspg=debug
+logging.path=./logs
+
+# Schema
+schema.uri=http://127.0.0.1:8887
+
+# MySQL Database
+server.repository.driver=com.antgroup.openspg.server.infra.dao.JdbcRepositoryClientDriver
+server.repository.impl.jdbc.url=jdbc:mysql://127.0.0.1:3306/openspg?useUnicode=true&characterEncoding=utf8&autoReconnect=true
+server.repository.impl.jdbc.host=127.0.0.1
+server.repository.impl.jdbc.port=3306
+server.repository.impl.jdbc.username=root
+server.repository.impl.jdbc.password=openspg
+server.repository.impl.jdbc.driver=com.mysql.jdbc.Driver
+
+# Encryption
+jasypt.encryptor.password=openspg
+
+# Neo4j Graph Store (local development)
+cloudext.graphstore.url=neo4j://127.0.0.1:7687?user=neo4j&password=neo4j@openspg&database=neo4j
+cloudext.searchengine.url=neo4j://127.0.0.1:7687?user=neo4j&password=neo4j@openspg&database=neo4j
+
+# Python (optional, only needed if Python support is required)
+# python.exec=/usr/local/bin/python3
+# python.paths=/usr/local/lib/python3.10/site-packages/
+
+# Scheduler
+scheduler.handler.type=local
+scheduler.metadata.store.type=local
+scheduler.handler.process.period=300
+scheduler.execute.max.day=10
\ No newline at end of file
diff --git a/server/common/service/pom.xml b/server/common/service/pom.xml
index 2ad5ba0cf..cac9ec51b 100644
--- a/server/common/service/pom.xml
+++ b/server/common/service/pom.xml
@@ -127,6 +127,10 @@
tomcat
jasper-runtime
+
+ jdk.tools
+ jdk.tools
+
diff --git a/server/common/service/src/main/java/com/antgroup/openspg/server/common/service/account/impl/AccountServiceLocalImpl.java b/server/common/service/src/main/java/com/antgroup/openspg/server/common/service/account/impl/AccountServiceLocalImpl.java
new file mode 100644
index 000000000..c71d04fd2
--- /dev/null
+++ b/server/common/service/src/main/java/com/antgroup/openspg/server/common/service/account/impl/AccountServiceLocalImpl.java
@@ -0,0 +1,108 @@
+/*
+ * Copyright 2023 OpenSPG Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
+ * in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied.
+ */
+package com.antgroup.openspg.server.common.service.account.impl;
+
+import com.antgroup.openspg.server.api.facade.Paged;
+import com.antgroup.openspg.server.api.http.client.account.AccountService;
+import com.antgroup.openspg.server.common.model.account.Account;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
+import javax.servlet.http.Cookie;
+import javax.servlet.http.HttpServletResponse;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+import org.springframework.stereotype.Service;
+
+@Service
+@ConditionalOnProperty(name = "env", havingValue = "local")
+public class AccountServiceLocalImpl implements AccountService {
+
+ private static final String DEFAULT_USER_NO = "local_user";
+ private static final String DEFAULT_USER_NAME = "Local Developer";
+
+ @Override
+ public Account getLoginUser() {
+ Account account = new Account();
+ account.setAccount(DEFAULT_USER_NO);
+ account.setRealName(DEFAULT_USER_NAME);
+ account.setWorkNo(DEFAULT_USER_NO);
+ return account;
+ }
+
+ @Override
+ public List getAccountByKeyword(String keyword) {
+ return Collections.singletonList(getLoginUser());
+ }
+
+ @Override
+ public Account getByUserNo(String userNo) {
+ if (DEFAULT_USER_NO.equals(userNo)) {
+ return getLoginUser();
+ }
+ return null;
+ }
+
+ @Override
+ public Account getWithPrivateByUserNo(String userNo) {
+ return getByUserNo(userNo);
+ }
+
+ @Override
+ public Integer create(Account account) {
+ return 1;
+ }
+
+ @Override
+ public Integer updatePassword(Account account) {
+ return 1;
+ }
+
+ @Override
+ public Integer deleteAccount(String workNo) {
+ return 1;
+ }
+
+ @Override
+ public Paged getAccountList(String account, Integer page, Integer size) {
+ List accounts = Collections.singletonList(getLoginUser());
+ Paged paged = new Paged(size, page);
+ paged.setResults(accounts);
+ paged.setTotal(1L);
+ return paged;
+ }
+
+ @Override
+ public String getSha256HexPassword(String password, String salt) {
+ return "hashed_password";
+ }
+
+ @Override
+ public Account getCurrentAccount(Cookie[] cookies) throws IOException {
+ return getLoginUser();
+ }
+
+ @Override
+ public boolean login(Account account, HttpServletResponse response) {
+ return true;
+ }
+
+ @Override
+ public String logout(String workNo, String redirectUrl) {
+ return redirectUrl;
+ }
+
+ @Override
+ public int updateUserConfig(Account account, Cookie[] cookies) {
+ return 1;
+ }
+}
diff --git a/specs/openspg-rest-api-usage.md b/specs/openspg-rest-api-usage.md
new file mode 100644
index 000000000..7ccf25cd0
--- /dev/null
+++ b/specs/openspg-rest-api-usage.md
@@ -0,0 +1,1321 @@
+# OpenSPG REST API Usage Guide
+
+This guide provides comprehensive examples for using OpenSPG via REST API, covering tenant/project management, schema definition, data ingestion, querying, and reasoning.
+
+## Table of Contents
+
+- [Base URL and Response Format](#base-url-and-response-format)
+- [1. Tenant Management](#1-tenant-management)
+- [2. Project Management](#2-project-management)
+- [3. Schema Management](#3-schema-management)
+- [4. Data Ingestion](#4-data-ingestion)
+- [5. Graph Operations](#5-graph-operations)
+- [6. Querying](#6-querying)
+- [7. Search](#7-search)
+- [8. Reasoning](#8-reasoning)
+- [9. Builder Jobs](#9-builder-jobs)
+- [10. Scheduler](#10-scheduler)
+- [Complete Workflow Example](#complete-workflow-example)
+
+---
+
+## Base URL and Response Format
+
+**Base URL**: `http://localhost:8887`
+
+### Standard Response Format
+
+All APIs return responses in one of two formats:
+
+**Format 1 (HttpResult)**:
+
+```json
+{
+ "success": true,
+ "result": { ... },
+ "errorCode": null,
+ "errorMsg": null,
+ "traceId": "xxx"
+}
+```
+
+**Format 2 (ApiResponse)**:
+
+```json
+{
+ "success": true,
+ "data": { ... },
+ "errorMsg": null,
+ "traceId": "xxx"
+}
+```
+
+---
+
+## 1. Tenant Management
+
+### Create Tenant
+
+```bash
+curl -X POST "http://localhost:8887/public/v1/tenant" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "name": "MyTenant",
+ "desc": "My organization tenant"
+ }'
+```
+
+**Response**:
+
+```json
+{
+ "success": true,
+ "data": {
+ "id": 1,
+ "name": "MyTenant",
+ "description": "My organization tenant"
+ }
+}
+```
+
+### Query Tenants
+
+```bash
+# Query all tenants
+curl -X GET "http://localhost:8887/public/v1/tenant"
+
+# Query specific tenant
+curl -X GET "http://localhost:8887/public/v1/tenant?tenantId=1"
+```
+
+---
+
+## 2. Project Management
+
+### Create Project
+
+A project is the main workspace for your knowledge graph. When creating a project with `autoSchema=true` (default), a set of default entity types will be created automatically.
+
+```bash
+curl -X POST "http://localhost:8887/public/v1/project" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "name": "RiskAnalysis",
+ "description": "Risk mining knowledge graph",
+ "namespace": "RiskAnalysis",
+ "tenantId": 1,
+ "userNo": "admin_user",
+ "tag": "LOCAL",
+ "visibility": "PUBLIC",
+ "autoSchema": true,
+ "config": {
+ "vectorizer": {
+ "type": "openai",
+ "model": "text-embedding-ada-002",
+ "apiKey": "your-api-key"
+ }
+ }
+ }'
+```
+
+**Parameters**:
+- `name`: Project display name
+- `namespace`: Unique namespace for schema types (min 3 characters)
+- `userNo`: User account (6-20 chars, letters/numbers/underscores only)
+- `tag`: `LOCAL` or `PUBLIC_NET`
+- `visibility`: `PUBLIC` or `PRIVATE`
+- `autoSchema`: If true, creates default entity types (Chunk, Person, Organization, etc.)
+- `config.vectorizer`: Required when tag=LOCAL
+
+**Response**:
+
+```json
+{
+ "success": true,
+ "data": {
+ "id": 1,
+ "name": "RiskAnalysis",
+ "namespace": "RiskAnalysis",
+ "tenantId": 1,
+ "description": "Risk mining knowledge graph",
+ "tag": "LOCAL"
+ }
+}
+```
+
+### Query Projects
+
+```bash
+# Query all projects
+curl -X GET "http://localhost:8887/public/v1/project"
+
+# Query by tenant
+curl -X GET "http://localhost:8887/public/v1/project?tenantId=1"
+
+# Query specific project
+curl -X GET "http://localhost:8887/public/v1/project?projectId=1"
+```
+
+### Update Project
+
+```bash
+curl -X POST "http://localhost:8887/public/v1/project/update" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "id": 1,
+ "name": "RiskAnalysis Updated",
+ "userNo": "admin_user",
+ "config": {
+ "vectorizer": {
+ "type": "openai",
+ "model": "text-embedding-3-small"
+ }
+ }
+ }'
+```
+
+---
+
+## 3. Schema Management
+
+### Query Project Schema
+
+Get all schema types defined in a project:
+
+```bash
+curl -X GET "http://localhost:8887/public/v1/schema/queryProjectSchema?projectId=1"
+```
+
+**Response**:
+
+```json
+{
+ "success": true,
+ "data": {
+ "projectId": 1,
+ "spgTypes": [
+ {
+ "spgTypeEnum": "ENTITY_TYPE",
+ "basicInfo": {
+ "name": {
+ "namespace": "RiskAnalysis",
+ "name": "Person"
+ },
+ "nameZh": "人物",
+ "desc": ""
+ },
+ "properties": [...],
+ "relations": [...]
+ }
+ ]
+ }
+}
+```
+
+### Alter Schema (Create/Update Types)
+
+Create custom entity types with properties and relations:
+
+```bash
+curl -X POST "http://localhost:8887/public/v1/schema/alterSchema" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "projectId": 1,
+ "schemaDraft": {
+ "alterSpgTypes": [
+ {
+ "spgTypeEnum": "ENTITY_TYPE",
+ "alterOperation": "CREATE",
+ "basicInfo": {
+ "name": {
+ "namespace": "RiskAnalysis",
+ "name": "Company"
+ },
+ "nameZh": "公司",
+ "desc": "Company entity type"
+ },
+ "properties": [
+ {
+ "alterOperation": "CREATE",
+ "basicInfo": {
+ "name": {
+ "name": "registeredCapital"
+ },
+ "nameZh": "注册资本",
+ "desc": "Company registered capital"
+ },
+ "objectTypeRef": {
+ "basicInfo": {
+ "name": {
+ "name": "Float"
+ }
+ },
+ "spgTypeEnum": "BASIC_TYPE"
+ }
+ },
+ {
+ "alterOperation": "CREATE",
+ "basicInfo": {
+ "name": {
+ "name": "industry"
+ },
+ "nameZh": "行业",
+ "desc": "Industry category"
+ },
+ "objectTypeRef": {
+ "basicInfo": {
+ "name": {
+ "name": "Text"
+ }
+ },
+ "spgTypeEnum": "BASIC_TYPE"
+ },
+ "advancedConfig": {
+ "indexType": "TEXT"
+ }
+ },
+ {
+ "alterOperation": "CREATE",
+ "basicInfo": {
+ "name": {
+ "name": "description"
+ },
+ "nameZh": "描述",
+ "desc": "Company description"
+ },
+ "objectTypeRef": {
+ "basicInfo": {
+ "name": {
+ "name": "Text"
+ }
+ },
+ "spgTypeEnum": "BASIC_TYPE"
+ },
+ "advancedConfig": {
+ "indexType": "TEXT_AND_VECTOR"
+ }
+ }
+ ],
+ "relations": [
+ {
+ "alterOperation": "CREATE",
+ "basicInfo": {
+ "name": {
+ "name": "locatedIn"
+ },
+ "nameZh": "位于",
+ "desc": "Company location"
+ },
+ "objectTypeRef": {
+ "basicInfo": {
+ "name": {
+ "namespace": "RiskAnalysis",
+ "name": "GeographicLocation"
+ }
+ },
+ "spgTypeEnum": "ENTITY_TYPE"
+ }
+ }
+ ]
+ }
+ ]
+ }
+ }'
+```
+
+**Index Types**:
+- `TEXT`: Full-text search index
+- `VECTOR`: Vector similarity search index
+- `TEXT_AND_VECTOR`: Both text and vector indexing
+
+**Basic Property Types**:
+- `Text`: String values
+- `Integer`: Integer numbers
+- `Float`: Floating point numbers
+- `STD.Email`: Email addresses
+- `STD.Phone`: Phone numbers
+
+### Query Specific SPG Type
+
+```bash
+curl -X GET "http://localhost:8887/public/v1/schema/querySpgType?name=RiskAnalysis.Company"
+```
+
+### Query Relation
+
+```bash
+curl -X GET "http://localhost:8887/public/v1/schema/queryRelation?sName=RiskAnalysis.Company&relation=locatedIn&oName=RiskAnalysis.GeographicLocation"
+```
+
+### Query Built-in Properties
+
+```bash
+curl -X GET "http://localhost:8887/public/v1/schema/queryBuiltInProperty?spgTypeEnum=ENTITY_TYPE"
+```
+
+---
+
+## 4. Data Ingestion
+
+### Upsert Vertices (Entities)
+
+Insert or update entity instances:
+
+```bash
+curl -X POST "http://localhost:8887/public/v1/graph/upsertVertex" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "projectId": 1,
+ "vertices": [
+ {
+ "type": "RiskAnalysis.Company",
+ "id": "company_001",
+ "properties": {
+ "name": "Acme Corporation",
+ "registeredCapital": 10000000.0,
+ "industry": "Technology",
+ "description": "A leading technology company specializing in AI solutions"
+ }
+ },
+ {
+ "type": "RiskAnalysis.Company",
+ "id": "company_002",
+ "properties": {
+ "name": "Global Trading Inc",
+ "registeredCapital": 5000000.0,
+ "industry": "Trading",
+ "description": "International trading company"
+ }
+ },
+ {
+ "type": "RiskAnalysis.Person",
+ "id": "person_001",
+ "properties": {
+ "name": "John Smith",
+ "semanticType": "CEO"
+ }
+ },
+ {
+ "type": "RiskAnalysis.GeographicLocation",
+ "id": "location_001",
+ "properties": {
+ "name": "New York City"
+ }
+ }
+ ]
+ }'
+```
+
+**Response**:
+
+```json
+{
+ "success": true,
+ "data": {
+ "affectedVerticesCount": 4,
+ "affectedEdgesCount": 0
+ }
+}
+```
+
+### Upsert Vertices with Vectors
+
+For entities with vector-indexed properties, you can provide pre-computed vectors:
+
+```bash
+curl -X POST "http://localhost:8887/public/v1/graph/upsertVertex" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "projectId": 1,
+ "vertices": [
+ {
+ "type": "RiskAnalysis.Chunk",
+ "id": "chunk_001",
+ "properties": {
+ "content": "The company has shown strong growth in Q4 2024..."
+ },
+ "vectors": {
+ "content": [0.1, 0.2, 0.3, ...]
+ }
+ }
+ ]
+ }'
+```
+
+### Upsert Edges (Relations)
+
+Insert or update relationships between entities:
+
+```bash
+curl -X POST "http://localhost:8887/public/v1/graph/upsertEdge" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "projectId": 1,
+ "upsertAdjacentVertices": false,
+ "edges": [
+ {
+ "srcType": "RiskAnalysis.Person",
+ "srcId": "person_001",
+ "dstType": "RiskAnalysis.Company",
+ "dstId": "company_001",
+ "label": "worksAt",
+ "properties": {
+ "position": "CEO",
+ "startDate": "2020-01-01"
+ }
+ },
+ {
+ "srcType": "RiskAnalysis.Company",
+ "srcId": "company_001",
+ "dstType": "RiskAnalysis.GeographicLocation",
+ "dstId": "location_001",
+ "label": "locatedIn",
+ "properties": {}
+ },
+ {
+ "srcType": "RiskAnalysis.Company",
+ "srcId": "company_001",
+ "dstType": "RiskAnalysis.Company",
+ "dstId": "company_002",
+ "label": "partnerOf",
+ "properties": {
+ "partnershipType": "Strategic"
+ }
+ }
+ ]
+ }'
+```
+
+**Parameters**:
+- `upsertAdjacentVertices`: If true, automatically creates source/destination vertices if they don't exist
+
+### Write SubGraph
+
+Write a complete subgraph (nodes and edges) in a single operation:
+
+```bash
+curl -X POST "http://localhost:8887/public/v1/graph/writerGraph" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "projectId": 1,
+ "operation": "UPSERT",
+ "enableLeadTo": false,
+ "subGraph": {
+ "resultNodes": [
+ {
+ "id": "company_003",
+ "bizId": "company_003",
+ "name": "Tech Startup",
+ "label": "RiskAnalysis.Company",
+ "properties": {
+ "registeredCapital": 1000000.0,
+ "industry": "AI"
+ }
+ },
+ {
+ "id": "person_002",
+ "bizId": "person_002",
+ "name": "Jane Doe",
+ "label": "RiskAnalysis.Person",
+ "properties": {
+ "semanticType": "Founder"
+ }
+ }
+ ],
+ "resultEdges": [
+ {
+ "from": "person_002",
+ "fromId": "person_002",
+ "fromType": "RiskAnalysis.Person",
+ "to": "company_003",
+ "toId": "company_003",
+ "toType": "RiskAnalysis.Company",
+ "label": "foundedBy",
+ "properties": {
+ "foundedYear": "2023"
+ }
+ }
+ ]
+ }
+ }'
+```
+
+**Operations**:
+- `UPSERT`: Insert or update
+- `DELETE`: Delete nodes/edges
+
+---
+
+## 5. Graph Operations
+
+### Delete Vertices
+
+```bash
+curl -X POST "http://localhost:8887/public/v1/graph/deleteVertex" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "projectId": 1,
+ "vertices": [
+ {
+ "type": "RiskAnalysis.Company",
+ "id": "company_003"
+ }
+ ]
+ }'
+```
+
+### Delete Edges
+
+```bash
+curl -X POST "http://localhost:8887/public/v1/graph/deleteEdge" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "projectId": 1,
+ "edges": [
+ {
+ "srcType": "RiskAnalysis.Person",
+ "srcId": "person_001",
+ "dstType": "RiskAnalysis.Company",
+ "dstId": "company_001",
+ "label": "worksAt"
+ }
+ ]
+ }'
+```
+
+### Get All Labels
+
+```bash
+curl -X GET "http://localhost:8887/public/v1/graph/allLabels?projectId=1"
+```
+
+### Get PageRank Scores
+
+```bash
+curl -X POST "http://localhost:8887/public/v1/graph/getPageRankScores" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "projectId": 1,
+ "spgType": "RiskAnalysis.Company",
+ "topK": 10
+ }'
+```
+
+---
+
+## 6. Querying
+
+### Query SPG Type Instances
+
+Query entities by type and IDs:
+
+```bash
+curl -X POST "http://localhost:8887/public/v1/query/spgType" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "projectId": 1,
+ "spgType": "RiskAnalysis.Company",
+ "ids": ["company_001", "company_002"]
+ }'
+```
+
+**Response**:
+
+```json
+{
+ "success": true,
+ "data": [
+ {
+ "id": "company_001",
+ "type": "RiskAnalysis.Company",
+ "properties": {
+ "name": "Acme Corporation",
+ "registeredCapital": 10000000.0,
+ "industry": "Technology"
+ }
+ },
+ {
+ "id": "company_002",
+ "type": "RiskAnalysis.Company",
+ "properties": {
+ "name": "Global Trading Inc",
+ "registeredCapital": 5000000.0,
+ "industry": "Trading"
+ }
+ }
+ ]
+}
+```
+
+### Sampling SPG Type
+
+Get random samples of a specific entity type:
+
+```bash
+curl -X GET "http://localhost:8887/public/v1/sampling/spgType?projectId=1&spgType=RiskAnalysis.Company&limit=10"
+```
+
+### Sampling Relations
+
+Get random samples of relationships:
+
+```bash
+curl -X GET "http://localhost:8887/public/v1/sampling/relation?projectId=1&sType=RiskAnalysis.Person&relation=worksAt&oType=RiskAnalysis.Company&limit=10"
+```
+
+---
+
+## 7. Search
+
+### Text Search
+
+Full-text search across entities:
+
+```bash
+curl -X POST "http://localhost:8887/public/v1/search/text" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "projectId": 1,
+ "queryString": "technology company AI",
+ "labelConstraints": ["RiskAnalysis.Company"],
+ "topk": 10,
+ "page": 0
+ }'
+```
+
+**Response**:
+
+```json
+{
+ "success": true,
+ "data": [
+ {
+ "docId": "company_001",
+ "label": "RiskAnalysis.Company",
+ "score": 0.95,
+ "properties": {
+ "name": "Acme Corporation",
+ "description": "A leading technology company specializing in AI solutions"
+ }
+ }
+ ]
+}
+```
+
+### Vector Search
+
+Semantic similarity search using vectors:
+
+```bash
+curl -X POST "http://localhost:8887/public/v1/search/vector" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "projectId": 1,
+ "label": "RiskAnalysis.Chunk",
+ "propertyKey": "content",
+ "queryVector": [0.1, 0.2, 0.3, ...],
+ "topk": 5,
+ "efSearch": 100
+ }'
+```
+
+### SPG Type Search
+
+Search entities of a specific type:
+
+```bash
+curl -X GET "http://localhost:8887/public/v1/search/spgType?projectId=1&spgTypeName=RiskAnalysis.Company&keyword=technology&pageNo=0&pageSize=10"
+```
+
+### Custom Search
+
+Execute custom search queries:
+
+```bash
+curl -X POST "http://localhost:8887/public/v1/search/custom" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "projectId": 1,
+ "indexName": "RiskAnalysis.Company",
+ "query": {
+ "bool": {
+ "must": [
+ {"match": {"industry": "Technology"}}
+ ],
+ "filter": [
+ {"range": {"registeredCapital": {"gte": 1000000}}}
+ ]
+ }
+ },
+ "topk": 10
+ }'
+```
+
+---
+
+## 8. Reasoning
+
+### Run Reasoning Query (KGDSL)
+
+Execute KGDSL (Knowledge Graph DSL) queries for graph traversal and reasoning:
+
+```bash
+curl -X POST "http://localhost:8887/public/v1/reason/run" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "projectId": 1,
+ "dsl": "MATCH (p:RiskAnalysis.Person)-[r:worksAt]->(c:RiskAnalysis.Company) WHERE c.industry = $industry RETURN p, r, c",
+ "params": {
+ "industry": "Technology"
+ }
+ }'
+```
+
+**Response**:
+
+```json
+{
+ "success": true,
+ "data": {
+ "projectId": 1,
+ "task": {
+ "taskId": "task_12345",
+ "status": "FINISHED",
+ "resultNodes": [
+ {
+ "id": "person_001",
+ "type": "RiskAnalysis.Person",
+ "properties": {"name": "John Smith"}
+ },
+ {
+ "id": "company_001",
+ "type": "RiskAnalysis.Company",
+ "properties": {"name": "Acme Corporation"}
+ }
+ ],
+ "resultEdges": [
+ {
+ "srcId": "person_001",
+ "dstId": "company_001",
+ "type": "worksAt"
+ }
+ ]
+ }
+ }
+}
+```
+
+### Common KGDSL Patterns
+
+**Find connected entities**:
+
+```
+MATCH (a:RiskAnalysis.Company)-[*1..3]-(b:RiskAnalysis.Company)
+WHERE a.id = 'company_001'
+RETURN b
+```
+
+**Path finding**:
+
+```
+MATCH path = (a:RiskAnalysis.Person)-[*..5]->(b:RiskAnalysis.Company)
+WHERE a.id = 'person_001' AND b.id = 'company_002'
+RETURN path
+```
+
+**Aggregation**:
+
+```
+MATCH (c:RiskAnalysis.Company)
+WHERE c.industry = 'Technology'
+RETURN count(c) as total, avg(c.registeredCapital) as avgCapital
+```
+
+### Thinker Query
+
+Execute intelligent reasoning with the Thinker engine:
+
+```bash
+curl -X POST "http://localhost:8887/public/v1/reason/thinker" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "projectId": 1,
+ "subject": "RiskAnalysis.Person/person_001",
+ "predicate": "potentialRisk",
+ "object": null,
+ "mode": "INFER",
+ "params": "{}"
+ }'
+```
+
+**Modes**:
+- `INFER`: Inference mode - deduce new relationships
+- `VERIFY`: Verification mode - validate existing relationships
+
+### Get Reasoning Schema
+
+Get schema information for reasoning:
+
+```bash
+curl -X GET "http://localhost:8887/public/v1/reason/schema?projectId=1"
+```
+
+---
+
+## 9. Builder Jobs
+
+### Submit KAG Builder Job
+
+Submit a knowledge graph building job:
+
+```bash
+curl -X POST "http://localhost:8887/public/v1/builder/kag/submit" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "projectId": 1,
+ "userNumber": "admin_user",
+ "command": "python build_kg.py --input /data/documents/ --output /data/graph/",
+ "workerNum": 2,
+ "workerCpu": 2.0,
+ "workerMemory": 4096,
+ "image": "openspg/builder:latest",
+ "envs": {
+ "OPENAI_API_KEY": "your-api-key"
+ }
+ }'
+```
+
+**Response**:
+
+```json
+{
+ "success": true,
+ "result": {
+ "id": 1,
+ "jobName": "KAG_COMMAND_admin_user_20240115_120000",
+ "status": "RUNNING",
+ "projectId": 1,
+ "taskId": 100
+ }
+}
+```
+
+### Get Builder Job Status
+
+```bash
+curl -X GET "http://localhost:8887/public/v1/builder/getById?id=1"
+```
+
+### Search Builder Jobs
+
+```bash
+curl -X POST "http://localhost:8887/public/v1/builder/search" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "projectId": 1,
+ "status": "RUNNING",
+ "pageNo": 0,
+ "pageSize": 20
+ }'
+```
+
+### Delete Builder Job
+
+```bash
+curl -X GET "http://localhost:8887/public/v1/builder/delete?id=1"
+```
+
+---
+
+## 10. Scheduler
+
+### Submit Scheduler Job
+
+```bash
+curl -X POST "http://localhost:8887/public/v1/scheduler/job/submit" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "projectId": 1,
+ "name": "Daily Data Sync",
+ "lifeCycle": "PERIOD",
+ "cronExpression": "0 0 2 * * ?",
+ "translateType": "KAG_COMMAND_BUILDER"
+ }'
+```
+
+### Execute Job Immediately
+
+```bash
+curl -X GET "http://localhost:8887/public/v1/scheduler/job/execute?id=1"
+```
+
+### Enable/Disable Job
+
+```bash
+# Enable
+curl -X GET "http://localhost:8887/public/v1/scheduler/job/enable?id=1"
+
+# Disable
+curl -X GET "http://localhost:8887/public/v1/scheduler/job/disable?id=1"
+```
+
+### Get Job Details
+
+```bash
+curl -X GET "http://localhost:8887/public/v1/scheduler/job/getById?id=1"
+```
+
+### Search Jobs
+
+```bash
+curl -X POST "http://localhost:8887/public/v1/scheduler/job/search" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "projectId": 1,
+ "status": "ENABLE",
+ "pageNo": 0,
+ "pageSize": 20
+ }'
+```
+
+### Instance Management
+
+```bash
+# Get instance
+curl -X GET "http://localhost:8887/public/v1/scheduler/instance/getById?id=1"
+
+# Stop instance
+curl -X GET "http://localhost:8887/public/v1/scheduler/instance/stop?id=1"
+
+# Restart instance
+curl -X GET "http://localhost:8887/public/v1/scheduler/instance/restart?id=1"
+
+# Search instances
+curl -X POST "http://localhost:8887/public/v1/scheduler/instance/search" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "jobId": 1,
+ "pageNo": 0,
+ "pageSize": 20
+ }'
+```
+
+---
+
+## Complete Workflow Example
+
+Here's a complete example workflow for building a risk analysis knowledge graph:
+
+### Step 1: Create Tenant and Project
+
+```bash
+# Create tenant
+curl -X POST "http://localhost:8887/public/v1/tenant" \
+ -H "Content-Type: application/json" \
+ -d '{"name": "RiskOrg", "desc": "Risk Analysis Organization"}'
+
+# Create project (note: tenant ID from previous response)
+curl -X POST "http://localhost:8887/public/v1/project" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "name": "RiskMining",
+ "namespace": "RiskMining",
+ "tenantId": 1,
+ "userNo": "risk_admin",
+ "tag": "LOCAL",
+ "visibility": "PUBLIC",
+ "autoSchema": false,
+ "config": {
+ "vectorizer": {"type": "local", "model": "bge-base"}
+ }
+ }'
+```
+
+### Step 2: Define Custom Schema
+
+```bash
+curl -X POST "http://localhost:8887/public/v1/schema/alterSchema" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "projectId": 1,
+ "schemaDraft": {
+ "alterSpgTypes": [
+ {
+ "spgTypeEnum": "ENTITY_TYPE",
+ "alterOperation": "CREATE",
+ "basicInfo": {
+ "name": {"namespace": "RiskMining", "name": "Account"},
+ "nameZh": "账户"
+ },
+ "properties": [
+ {
+ "alterOperation": "CREATE",
+ "basicInfo": {"name": {"name": "accountNumber"}, "nameZh": "账号"},
+ "objectTypeRef": {"basicInfo": {"name": {"name": "Text"}}, "spgTypeEnum": "BASIC_TYPE"}
+ },
+ {
+ "alterOperation": "CREATE",
+ "basicInfo": {"name": {"name": "riskScore"}, "nameZh": "风险分数"},
+ "objectTypeRef": {"basicInfo": {"name": {"name": "Float"}}, "spgTypeEnum": "BASIC_TYPE"}
+ }
+ ]
+ },
+ {
+ "spgTypeEnum": "ENTITY_TYPE",
+ "alterOperation": "CREATE",
+ "basicInfo": {
+ "name": {"namespace": "RiskMining", "name": "Transaction"},
+ "nameZh": "交易"
+ },
+ "properties": [
+ {
+ "alterOperation": "CREATE",
+ "basicInfo": {"name": {"name": "amount"}, "nameZh": "金额"},
+ "objectTypeRef": {"basicInfo": {"name": {"name": "Float"}}, "spgTypeEnum": "BASIC_TYPE"}
+ },
+ {
+ "alterOperation": "CREATE",
+ "basicInfo": {"name": {"name": "timestamp"}, "nameZh": "时间"},
+ "objectTypeRef": {"basicInfo": {"name": {"name": "Text"}}, "spgTypeEnum": "BASIC_TYPE"}
+ }
+ ],
+ "relations": [
+ {
+ "alterOperation": "CREATE",
+ "basicInfo": {"name": {"name": "fromAccount"}, "nameZh": "来自账户"},
+ "objectTypeRef": {"basicInfo": {"name": {"namespace": "RiskMining", "name": "Account"}}, "spgTypeEnum": "ENTITY_TYPE"}
+ },
+ {
+ "alterOperation": "CREATE",
+ "basicInfo": {"name": {"name": "toAccount"}, "nameZh": "到账户"},
+ "objectTypeRef": {"basicInfo": {"name": {"namespace": "RiskMining", "name": "Account"}}, "spgTypeEnum": "ENTITY_TYPE"}
+ }
+ ]
+ }
+ ]
+ }
+ }'
+```
+
+### Step 3: Ingest Data
+
+```bash
+# Insert accounts
+curl -X POST "http://localhost:8887/public/v1/graph/upsertVertex" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "projectId": 1,
+ "vertices": [
+ {"type": "RiskMining.Account", "id": "acc_001", "properties": {"accountNumber": "1234567890", "riskScore": 0.2}},
+ {"type": "RiskMining.Account", "id": "acc_002", "properties": {"accountNumber": "0987654321", "riskScore": 0.8}},
+ {"type": "RiskMining.Account", "id": "acc_003", "properties": {"accountNumber": "1122334455", "riskScore": 0.5}}
+ ]
+ }'
+
+# Insert transactions with relationships
+curl -X POST "http://localhost:8887/public/v1/graph/upsertVertex" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "projectId": 1,
+ "vertices": [
+ {"type": "RiskMining.Transaction", "id": "txn_001", "properties": {"amount": 10000.0, "timestamp": "2024-01-15T10:30:00Z"}},
+ {"type": "RiskMining.Transaction", "id": "txn_002", "properties": {"amount": 50000.0, "timestamp": "2024-01-15T11:45:00Z"}}
+ ]
+ }'
+
+# Insert edges
+curl -X POST "http://localhost:8887/public/v1/graph/upsertEdge" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "projectId": 1,
+ "edges": [
+ {"srcType": "RiskMining.Transaction", "srcId": "txn_001", "dstType": "RiskMining.Account", "dstId": "acc_001", "label": "fromAccount"},
+ {"srcType": "RiskMining.Transaction", "srcId": "txn_001", "dstType": "RiskMining.Account", "dstId": "acc_002", "label": "toAccount"},
+ {"srcType": "RiskMining.Transaction", "srcId": "txn_002", "dstType": "RiskMining.Account", "dstId": "acc_002", "label": "fromAccount"},
+ {"srcType": "RiskMining.Transaction", "srcId": "txn_002", "dstType": "RiskMining.Account", "dstId": "acc_003", "label": "toAccount"}
+ ]
+ }'
+```
+
+### Step 4: Query and Analyze
+
+```bash
+# Find high-risk accounts
+curl -X POST "http://localhost:8887/public/v1/reason/run" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "projectId": 1,
+ "dsl": "MATCH (a:RiskMining.Account) WHERE a.riskScore > 0.7 RETURN a",
+ "params": {}
+ }'
+
+# Find transactions involving high-risk accounts
+curl -X POST "http://localhost:8887/public/v1/reason/run" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "projectId": 1,
+ "dsl": "MATCH (t:RiskMining.Transaction)-[:fromAccount|toAccount]->(a:RiskMining.Account) WHERE a.riskScore > 0.7 RETURN t, a",
+ "params": {}
+ }'
+
+# Find transaction paths between accounts
+curl -X POST "http://localhost:8887/public/v1/reason/run" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "projectId": 1,
+ "dsl": "MATCH path = (a1:RiskMining.Account)-[*..4]-(a2:RiskMining.Account) WHERE a1.id = $startAccount AND a2.id = $endAccount RETURN path",
+ "params": {
+ "startAccount": "acc_001",
+ "endAccount": "acc_003"
+ }
+ }'
+```
+
+---
+
+## Data Source Management
+
+### Register Data Source
+
+```bash
+curl -X POST "http://localhost:8887/public/v1/datasource/insert" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "dbName": "risk_data",
+ "dbUrl": "jdbc:mysql://localhost:3306/risk_data",
+ "dbUser": "root",
+ "dbPassword": "password",
+ "type": "MYSQL",
+ "createUser": "admin"
+ }'
+```
+
+### Test Connection
+
+```bash
+curl -X POST "http://localhost:8887/public/v1/datasource/testConnect" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "dbUrl": "jdbc:mysql://localhost:3306/risk_data",
+ "dbUser": "root",
+ "dbPassword": "password",
+ "type": "MYSQL"
+ }'
+```
+
+### List Tables
+
+```bash
+curl -X GET "http://localhost:8887/public/v1/datasource/getAllTable?id=1&dbName=risk_data&keyword="
+```
+
+### Get Table Schema
+
+```bash
+curl -X GET "http://localhost:8887/public/v1/datasource/getTableDetail?id=1&dbName=risk_data&tableName=transactions"
+```
+
+---
+
+## Concept Management
+
+### Query Concepts
+
+```bash
+curl -X GET "http://localhost:8887/public/v1/concept/queryConcept?conceptTypeName=RiskMining.RiskLevel&conceptName=High"
+```
+
+### Define Dynamic Taxonomy
+
+```bash
+curl -X POST "http://localhost:8887/public/v1/concept/defineDynamicTaxonomy" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "conceptTypeName": "RiskMining.RiskLevel",
+ "conceptName": "High",
+ "dsl": "MATCH (a:RiskMining.Account) WHERE a.riskScore > 0.7 RETURN a"
+ }'
+```
+
+### Define Logical Causation
+
+```bash
+curl -X POST "http://localhost:8887/public/v1/concept/defineLogicalCausation" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "subjectConceptTypeName": "RiskMining.Account",
+ "subjectConceptName": "HighRiskAccount",
+ "objectConceptTypeName": "RiskMining.Alert",
+ "objectConceptName": "RiskAlert",
+ "predicateName": "triggers",
+ "dsl": "MATCH (a:RiskMining.Account)-[:triggers]->(alert:RiskMining.Alert) WHERE a.riskScore > 0.7 RETURN alert"
+ }'
+```
+
+---
+
+## Retrieval Management
+
+### Get Retrievals by Project
+
+```bash
+curl -X GET "http://localhost:8887/public/v1/retrieval/getByProjectId?projectId=1"
+```
+
+### Search Retrievals
+
+```bash
+curl -X POST "http://localhost:8887/public/v1/retrieval/search" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "projectId": 1,
+ "pageNo": 0,
+ "pageSize": 20
+ }'
+```
+
+### Update Retrieval
+
+```bash
+curl -X POST "http://localhost:8887/public/v1/retrieval/update" \
+ -H "Content-Type: application/json" \
+ -d '{
+ "id": 1,
+ "name": "Updated Retrieval",
+ "config": {}
+ }'
+```
+
+---
+
+## Error Handling
+
+All API errors follow this format:
+
+```json
+{
+ "success": false,
+ "errorCode": "INVALID_PARAM",
+ "errorMsg": "projectId cannot be null",
+ "traceId": "trace_12345"
+}
+```
+
+Common error codes:
+- `INVALID_PARAM`: Invalid request parameters
+- `NOT_FOUND`: Resource not found
+- `UNAUTHORIZED`: Authentication required
+- `PERMISSION_DENIED`: Insufficient permissions
+- `INTERNAL_ERROR`: Server-side error
+
+---
+
+## Best Practices
+
+1. **Batch Operations**: Use batch upsert APIs for bulk data ingestion instead of individual inserts
+2. **Indexing**: Configure appropriate index types (TEXT, VECTOR, TEXT_AND_VECTOR) based on query patterns
+3. **Schema Design**: Design your schema with clear entity types and relationships before data ingestion
+4. **Error Handling**: Always check the `success` field in responses and handle errors appropriately
+5. **Pagination**: Use pagination parameters (`pageNo`, `pageSize`) for large result sets
+6. **Namespacing**: Use consistent namespace prefixes for all entity types in a project
+