Compare commits
6 Commits
f5ad79b162...main

| Author | SHA1 | Date |
|---|---|---|
| | 9cb2780fd6 | |
| | 9fedca93d6 | |
| | fdecce139d | |
| | cf4c6754a0 | |
| | e6d7903174 | |
| | 3acf6be119 | |
.gitea/workflows/application.yaml (new file, 52 lines)
@@ -0,0 +1,52 @@
apiVersion: application.devstar.cn/v1
kind: Application
metadata:
  name: mengningsoftware
  namespace: web-servers
  labels:
    app.kubernetes.io/component: web-server
    app.kubernetes.io/managed-by: devstar
    app.kubernetes.io/name: mengningsoftware
spec:
  environment:
    NGINX_VERSION: "1.24.0"
  expose: true
  networkPolicy:
    gateway:
      enabled: true
      hosts:
        - "mengning.com.cn"
      ports:
        - name: http
          number: 80
          protocol: HTTP
        - name: https
          number: 443
          protocol: HTTPS
      tls:
        - hosts:
            - "mengning.com.cn"
          minProtocolVersion: "TLSv1_2"
          mode: SIMPLE
          secretName: mengningsoftware-tls
          secretNamespace: istio-system
  replicas: 2
  resources:
    cpu: "500m"
    memory: "512Mi"
  service:
    enabled: true
    type: ClusterIP
    ports:
      - name: http
        port: 80
        protocol: TCP
        targetPort: http
  template:
    type: stateless
    image: ${DOCKER_REGISTRY_ADDRESS}/${DOCKER_REPOSITORY_ARTIFACT}:${DOCKER_IMAGE_TAG}
    ports:
      - name: http
        port: 80
        protocol: TCP
@@ -37,12 +37,18 @@ jobs:
      - name: Log in to the Docker Registry and push the image
        run: |
          echo "${{ secrets.DOCKER_REGISTRY_PASSWORD }}" | docker login -u ${{ secrets.DOCKER_REGISTRY_USERNAME }} ${{ vars.DOCKER_REGISTRY_ADDRESS }} --password-stdin
          docker tag devstar-docs:${{ gitea.sha }} ${{ vars.DOCKER_REGISTRY_ADDRESS }}/devstar/devstar-studio-docs:${{ gitea.sha }}
          docker tag devstar-docs:${{ gitea.sha }} ${{ vars.DOCKER_REGISTRY_ADDRESS }}/devstar/devstar-studio-docs:latest
          docker push ${{ vars.DOCKER_REGISTRY_ADDRESS }}/devstar/devstar-studio-docs:${{ gitea.sha }}
          docker push ${{ vars.DOCKER_REGISTRY_ADDRESS }}/devstar/devstar-studio-docs:latest
          docker tag devstar-docs:${{ gitea.sha }} ${{ vars.DOCKER_REGISTRY_ADDRESS }}/${{ vars.DOCKER_REPOSITORY_ARTIFACT}}:${{ gitea.sha }}
          docker tag devstar-docs:${{ gitea.sha }} ${{ vars.DOCKER_REGISTRY_ADDRESS }}/${{ vars.DOCKER_REPOSITORY_ARTIFACT}}:latest
          docker push ${{ vars.DOCKER_REGISTRY_ADDRESS }}/${{ vars.DOCKER_REPOSITORY_ARTIFACT}}:${{ gitea.sha }}
          docker push ${{ vars.DOCKER_REGISTRY_ADDRESS }}/${{ vars.DOCKER_REPOSITORY_ARTIFACT}}:latest

      - name: 📝 Update mengning.com.cn
      - name: 📝 Update mengning.com.cn (by deleting/recreating the Application CRD)
        env:
          DOCKER_REGISTRY_ADDRESS: ${{ vars.DOCKER_REGISTRY_ADDRESS }}
          DOCKER_REPOSITORY_ARTIFACT: ${{ vars.DOCKER_REPOSITORY_ARTIFACT }}
          DOCKER_IMAGE_TAG: ${{ gitea.sha }}
          TLS_CERTIFICATE: ${{ secrets.TLS_CERTIFICATE }}
          TLS_PRIVATE_KEY: ${{ secrets.TLS_PRIVATE_KEY }}
        run: |
          curl -LO https://mirrors.ustc.edu.cn/kubernetes/core%3A/stable%3A/v1.28/deb/amd64/kubectl_1.28.0-1.1_amd64.deb
          sudo dpkg -i kubectl_1.28.0-1.1_amd64.deb
@@ -50,4 +56,24 @@ jobs:
          kubectl config set-credentials token-user --token=${{ secrets.K8S_TOKEN }}
          kubectl config set-context remote-context --cluster=remote-cluster --user=token-user
          kubectl config use-context remote-context
          kubectl set image deployment/devstar-docs-app devstar-docs=${{ vars.DOCKER_REGISTRY_ADDRESS }}/devstar/devstar-studio-docs:latest -n app

          # Create or update the TLS Secret
          # Secret name: mengningsoftware-tls
          echo "$TLS_CERTIFICATE" > /tmp/tls.crt
          echo "$TLS_PRIVATE_KEY" > /tmp/tls.key
          kubectl create secret tls mengningsoftware-tls \
            --cert=/tmp/tls.crt \
            --key=/tmp/tls.key \
            -n istio-system \
            --dry-run=client -o yaml | kubectl apply -f -
          rm -f /tmp/tls.crt /tmp/tls.key

          # Substitute the image address variables in the Application YAML and apply it
          sed "s|\${DOCKER_REGISTRY_ADDRESS}|${DOCKER_REGISTRY_ADDRESS}|g; s|\${DOCKER_REPOSITORY_ARTIFACT}|${DOCKER_REPOSITORY_ARTIFACT}|g; s|\${DOCKER_IMAGE_TAG}|${DOCKER_IMAGE_TAG}|g" \
            .gitea/workflows/application.yaml > /tmp/application-crd.yaml

          # Delete the old Application CRD if it exists (ignore errors)
          kubectl delete application mengningsoftware -n web-servers --ignore-not-found=true || true

          # Create the new Application CRD
          kubectl apply -f /tmp/application-crd.yaml
@@ -40,51 +40,36 @@ repo sync -j16
cd android-14.0.0_r2
source build/envsetup.sh # initialize the environment
lunch aosp_arm-eng # select the target configuration
make # run the single-machine build; on success you will see:
make # run the single-machine build
```
After a successful build you will see:
```
[ 99% 130084/130093] out/host/linux-x86/bin/resourceshrinker --output out/soong/.intermediates/frameworks/base/packages/SystemUI/SystemUI
Shrunken apk stored in:
out/soong/.intermediates/frameworks/base/packages/SystemUI/SystemUI/android_common/SystemUI-unsigned.apk.proto.out.apk
[100% 130093/130093] Target vbmeta image: out/target/product/generic/vbmeta.img

#### build completed successfully (02:20:12 (hh:mm:ss)) ####

```
```
[19:20:05.430], pending/wanted: 4, ready: 1, running: 0

#### build completed successfully (02:26:51 (hh:mm:ss)) ####

real 146m51.385s
user 1327m35.812s
sys 78m2.153s
root@lab1:/home/lab1/android-14.0.0_r2#
```

### Configure the AOSP project to enable ShareBuild mode

* Copy .sharebuild.yml to the AOSP root directory
* In the AOSP project root, run setup_sharebuild_forAOSP14.sh to configure ShareBuild distributed compilation
```
cd android-14.0.0_r2
cp /etc/ninja2/aosp14/.sharebuild.yml ./
```

* In the AOSP 14 project root, replace the ninja bundled with AOSP with android_ninja
```
cp /usr/bin/android_ninja prebuilts/build-tools/linux-x86/bin/ninja
```

* Enable ShareBuild mode, then run the distributed build.
Creating a ninja2.conf file in the project root as follows enables ShareBuild mode:
```
sharebuid:true
/etc/ninja2/aosp14/setup_sharebuild_forAOSP14.sh
```

* Run the distributed build in the project root

```bash
su # switch to root to disable the sandbox
source build/envsetup.sh # initialize the environment
lunch aosp_arm-eng # select the target configuration
make # run the distributed build; on success you will see:
make # run the distributed build
```
After a successful build you will see:

* Client node:
```
[ShareBuild] Command will be executed REMOTELY
@@ -101,7 +86,7 @@ stdout: , stderr:
real 102m16.362s
user 1074m56.673s
sys 64m22.863s
root@lab1:/home/lab1/android-14.0.0_r2#

```
* Server node ShareBuild log (excerpt):
```
@@ -30,3 +30,106 @@ DevStar代码托管平台中项目设置、用户设置和后台管理中都可

### AI Code Review in detail

## **I. Core workflow**

AI Code Review executes as follows:

1. **Trigger conditions**
   - Someone opens a new PR (pull_request/opened)
   - or updates a PR (pull_request/synchronize)
   → the workflow starts automatically.
2. **Code checkout (Checkout)**
   Uses actions/checkout to pull the PR's code changes and prepare the context for the review.
3. **Invoke the AiReviewPR Action**
   - The Action reads the PR's diff, file contents, and context.
   - It assembles this content into a review prompt.
   - It calls the large language model you configured (Ollama, an OpenAI-compatible API, etc.).
   - Once the model responds, the output is automatically posted to the PR comments or written to the logs.
4. **Output**
   - If `REVIEW_PULL_REQUEST=true` → a comment is posted on the PR automatically.
   - If false → the result is only written to the logs, which is convenient for local verification.

------

## **II. How AiReviewPR works internally**

The following is how the Action actually works (see the code on GitHub):

### **1. Automatically collect PR information**

The Action automatically reads:

- The PR number, author, and commit messages
- The diff content (additions, deletions, modifications)
- The contents of the affected files

### **Reading the PR diff**

```
const diff = await octokit.pulls.get({
  ...github.context.issue,
  mediaType: { format: "diff" },
}).then(r => r.data);
```

### 2. Build the review prompt

The prompt automatically includes:

- A summary of the code changes

- Code snippets before/after the change

- The format the model must answer in, for example:

```
1. Potential bugs
2. Code style issues
3. Performance optimizations
4. Security risks
5. Refactoring suggestions
```

### **3. Call the LLM API**

AiReviewPR supports any OpenAI-API-compatible model, for example:

- Local **Ollama**
- Public OpenAI, DeepSeek, or Qwen services
- Privately hosted model services

You only need to provide:

```
vars.MODEL
vars.OLLAMA_HOST
```

The Action automatically sends:

```
{
  "model": "qwen2.5:14b",
  "messages": [
    {"role": "system", "content": "..."},
    {"role": "user", "content": "Here are the PR's code changes..."}
  ]
}
```
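
To see what such a request looks like outside the workflow, you can send an equivalent call to the Ollama host by hand. This is a minimal sketch, not part of AiReviewPR itself, assuming Ollama's native `/api/chat` endpoint and reusing the example host and model mentioned in this document:

```bash
# Minimal sketch: manually send an equivalent chat request to an Ollama host to check
# that the model answers review-style prompts. Uses Ollama's /api/chat endpoint; the
# host and model below are the examples used elsewhere in this document.
curl -s http://172.16.94.26:11434/api/chat -d '{
  "model": "qwen2.5:14b",
  "stream": false,
  "messages": [
    {"role": "system", "content": "You are a strict code reviewer."},
    {"role": "user", "content": "Here are the PR code changes: ..."}
  ]
}'
```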

### **4. Parse the returned data**

The Action extracts the model's answer, converts it to Markdown, and depending on the configuration outputs it as:

- A GitHub/Gitea PR comment
  or
- Workflow logs (convenient for debugging)

### **Posting the comment**

```
await octokit.issues.createComment({
  ...github.context.issue,
  body: review,
});
```
@@ -1,3 +1,4 @@

# DevStar AI+ DevOps

DevStar AI+ DevOps is a complete AI-driven R&D platform solution. By integrating the DevStar platform, code large language models, the Gitea MCP Server, and AI code tools (Cursor, Claude Code, iFlow, etc.), it provides developers with an intelligent development support system.
@@ -13,12 +14,11 @@ wget -c https://devstar.cn/assets/install.sh && chmod +x install.sh && sudo ./in
sudo devstar start
```

After installation we get the URL of the DevStar code hosting platform, e.g. http://172.16.94.26:80
After installation we get the URL of the DevStar code hosting platform, e.g. http://172.16.94.26:80 , which is later used as `GITEA_HOST` (for the MCP Server)
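
Before going further, it can help to confirm the address is reachable. A minimal sketch, assuming DevStar exposes the standard Gitea REST API under `/api/v1` and using the example address above:

```bash
# Minimal sketch: check that the freshly installed DevStar/Gitea instance is reachable.
# Assumes the standard Gitea REST API; the host is the example address from above.
GITEA_HOST=http://172.16.94.26:80
curl -s -o /dev/null -w "%{http_code}\n" "$GITEA_HOST/api/v1/version"
```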

### 2. Privately deploy a code LLM with Ollama

> If you use a third-party LLM open API, you can skip this part.
> For example, register on the [Zhipu AI open platform](https://bigmodel.cn/usercenter/proj-mgmt/apikeys) and add a new API Key; for Claude Code, use the URL https://open.bigmodel.cn/api/anthropic
> If you use a third-party API and token, for example an API Key registered and created on the [Zhipu AI open platform](https://bigmodel.cn/usercenter/proj-mgmt/apikeys), you can skip this part.

Install on Ubuntu 20.04:
```
@@ -53,7 +53,14 @@ systemctl daemon-reload
systemctl restart ollama
```

After installation we get the API URL, e.g. http://172.16.94.26:11434/api/tags , the model, e.g. qwen2.5-coder:32b, and the token, e.g. TOKEN***************
**Output** (you can sanity-check these values with the snippet below)

- Model service address, for example: `http://172.16.94.26:11434`
- Model name, for example: `qwen2.5-coder:32b`

**Where it is used later**

- AI Code Review in CI/CD (as `vars.MODEL` / `vars.OLLAMA_HOST`)
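
A quick sanity check of these values, using the `/api/tags` endpoint already mentioned above and the example host:

```bash
# Quick check: list the models served by the Ollama instance.
# Uses the /api/tags endpoint and the example host from this section.
OLLAMA_HOST=http://172.16.94.26:11434
curl -s "$OLLAMA_HOST/api/tags"
# The response should list "qwen2.5-coder:32b" if the model was pulled successfully.
```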

### 3. Use the code LLM in your project

@@ -82,8 +89,14 @@ jobs:
model: ${{ vars.MODEL }}
host: ${{ vars.OLLAMA_HOST }}
REVIEW_PULL_REQUEST: false
# If you use an AI token, configure:
# ai_token: ${{ vars.AI_TOKEN }}
```
In the DevStar code hosting platform, variables such as vars.MODEL and vars.OLLAMA_HOST can be set in project settings, user settings, and the admin panel.
Then, in your project repository on the DevStar platform, set the variables or secrets under [Settings > Workflows]:

- `vars.MODEL`: the **model name from step 2**, e.g. `qwen2.5-coder:32b`
- `vars.OLLAMA_HOST`: the **model service address obtained in step 2**, e.g. `http://172.16.94.26:11434`
- `vars.AI_TOKEN`: the **third-party API Key token obtained in step 2**

#### Install and configure the MCP Server

@@ -95,7 +108,6 @@ DevStar代码托管平台中项目设置、用户设置和后台管理中都可

```
{
  "mcp": {
    "inputs": [
      {
        "type": "promptString",
@@ -118,21 +130,42 @@ DevStar代码托管平台中项目设置、用户设置和后台管理中都可
          "docker.gitea.com/gitea-mcp-server"
        ],
        "env": {
          "GITEA_HOST": "--host http://172.16.94.26",
          "GITEA_HOST": "<Your Gitea Host>",
          "GITEA_ACCESS_TOKEN": "${input:gitea_token}"
        }
      }
    }
  }
}
```

#### Configure the AI IDE/CLI to use the private LLM and MCP Server
`GITEA_HOST`: **the DevStar code hosting platform address obtained in step 1**

`gitea_token`: a personal access token from DevStar / Gitea

How to obtain it:

1. Log in to the DevStar platform (the site corresponding to `GITEA_HOST`)
2. Go to: top-right user menu, [Settings > Applications]
3. Click "Generate New Token", name the token, and grant the necessary permissions such as repository read access
4. Copy the generated string; this is your `gitea_token` (you can verify it as shown below)
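
A minimal sketch for verifying the token, assuming the standard Gitea REST API and the example host from step 1:

```bash
# Minimal sketch: verify that the personal access token is valid.
# Assumes the standard Gitea REST API; GITEA_HOST is the example address from step 1.
GITEA_HOST=http://172.16.94.26
GITEA_TOKEN=<your gitea_token>
curl -s -H "Authorization: token $GITEA_TOKEN" "$GITEA_HOST/api/v1/user"
# A JSON object describing your user account indicates the token works.
```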

#### Configure the AI IDE/CLI to use an LLM and the MCP Server

- **LLM**
  - Using a third-party API / cloud model (OpenAI/Anthropic/Azure/Gemini)
    - **Cursor**: in the model settings, choose the corresponding provider and configure the **API Key / Base URL / Model**
    - **Continue**: in Add Chat model, choose the corresponding provider and configure the **API Key / Base URL / Model**
    - **Copilot**: in "Manage models", choose a **cloud model supported by Copilot** (if you need a bring-your-own-key third-party model, it is usually more direct to connect it via Cursor/Continue)
  - Using a local/private deployment (Ollama / intranet LLM)
    - **Copilot**: in "Manage models", choose **ollama**
    - **Continue**: Add Chat model → choose **ollama** as the provider
    - **Cursor**: if the LLM is on an intranet and Cursor cannot connect to it directly, set up a **reverse proxy / enterprise gateway** that provides a publicly reachable **Base URL**, then connect
- **MCP Server (independent of the LLM choice)**
  - **Copilot (VS Code)**: add the MCP configuration to **.vscode/mcp.json**
  - **Cursor**: Cursor Settings → Tools & MCP → New MCP Server; write the MCP configuration into **mcp.json**
  - **Continue**: configure YAML under Settings → Tools → MCP Servers, or reuse Cursor's MCP configuration by placing **mcp.json** at **.continue/mcpServers/mcp.json**


* Copilot: a brief text description, not too many screenshots; official configuration links can be provided
* Cursor
* Continue
* ...

## 🚀 DevStar AI+ DevOps demo

@@ -140,22 +173,35 @@ DevStar代码托管平台中项目设置、用户设置和后台管理中都可

### Create a project

Create a project using the ai-develops project template
Click "Create repository" in the top-right corner -> under DevStar templates select Devstar.cn/template/ai-develops

todo


Create an issue in the project



### AI-generated code

todo
1. Please read issue #1 in the Gitea repository owner/repo, summarize the problem and the expected behavior in your own words, and give a simple approach to solving it.

2. Please implement this feature based on your understanding of issue #

3. Please add or update test code for this fix of issue #1, following the project's existing test style, and explain what behavior each test verifies.



### Submit a PR

todo
1. Please use Gitea MCP to create a new branch for issue #1 (e.g. fix/issue-1) and commit all related changes as one clear commit

2. Please use Gitea MCP to open a PR from the branch you just created to the main branch, with "#1" in the title and a description that briefly explains the problem, the solution, the main changes, and the test status, and send me the PR link or number.



### AI Code Review

todo


### Merge the PR


todo
@@ -1,4 +1,3 @@

# MCP Server

### Quickly install and configure the MCP Server
@@ -11,7 +10,6 @@

```
{
  "mcp": {
    "inputs": [
      {
        "type": "promptString",
@@ -34,18 +32,32 @@
          "docker.gitea.com/gitea-mcp-server"
        ],
        "env": {
          "GITEA_HOST": "--host http://172.16.94.26",
          "GITEA_HOST": "<Your Gitea Host>",
          "GITEA_ACCESS_TOKEN": "${input:gitea_token}"
        }
      }
    }
  }
}
```

### Notes on using the MCP Server

* Copilot: a brief text description, not too many screenshots; official configuration links can be provided
* Cursor
* Continue
* ...
#### Copilot (VS Code)

- Put the configuration in `.vscode/mcp.json`. Restart VS Code → Copilot loads it automatically.
- [Official documentation](https://vscode.js.cn/docs/copilot/customization/mcp-servers)


#### Cursor

- Put the configuration in `.cursor/mcp.json` or `.vscode/mcp.json`.
- Open Cursor → it will prompt "MCP Server detected". Click Enable.
- [Official documentation](https://cursor.com/cn/docs/context/mcp)


#### Continue

- Put the configuration in `.continue/mcpServers/mcp.json`
- [Official documentation](https://docs.continue.dev/customize/deep-dives/mcp)
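
Whichever client you use, it can be worth starting the MCP server container once by hand to confirm the image, host, and token. A minimal sketch; the `docker run` arguments below are an assumption based on the JSON configuration above, and your client may generate different ones:

```bash
# Minimal sketch: run the Gitea MCP server container manually before wiring it into an IDE.
# The invocation is an assumption inferred from the JSON configuration above; the image
# name and the two environment variables are the ones shown there.
docker run -i --rm \
  -e GITEA_HOST="http://172.16.94.26" \
  -e GITEA_ACCESS_TOKEN="<your gitea_token>" \
  docker.gitea.com/gitea-mcp-server
# If the container starts without an authentication error, the image and token are usable.
```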
BIN src/devstar/static/code-review.png (new file, 65 KiB)
BIN src/devstar/static/code.png (new file, 100 KiB)
BIN src/devstar/static/exp.png (new file, 68 KiB)
BIN src/devstar/static/feature-2.png (new file, 121 KiB)
BIN src/devstar/static/feature.png (new file, 34 KiB)
BIN src/devstar/static/issue-1.png (new file, 240 KiB)
BIN src/devstar/static/issue.png (new file, 45 KiB)
BIN src/devstar/static/mcp.png (new file, 115 KiB)
BIN src/devstar/static/ollama-llm.png (new file, 18 KiB)
BIN src/devstar/static/port.png (new file, 13 KiB)
BIN src/devstar/static/pr.png (new file, 62 KiB)
BIN src/devstar/static/review.png (new file, 326 KiB)
BIN src/devstar/static/template.png (new file, 214 KiB)
BIN src/devstar/static/vars.png (new file, 65 KiB)