AhmadA82 committed
Commit c555338 · verified · 1 parent: d53a45b
Files changed (7):
  1. .gitattributes +35 -35
  2. Dockerfile +28 -28
  3. README.md +13 -13
  4. app.py +8 -8
  5. docker-compose.yml +19 -19
  6. main.py +25 -25
  7. requirements.txt +1 -1
.gitattributes CHANGED
@@ -1,35 +1,35 @@
- *.7z filter=lfs diff=lfs merge=lfs -text
- *.arrow filter=lfs diff=lfs merge=lfs -text
- *.bin filter=lfs diff=lfs merge=lfs -text
- *.bz2 filter=lfs diff=lfs merge=lfs -text
- *.ckpt filter=lfs diff=lfs merge=lfs -text
- *.ftz filter=lfs diff=lfs merge=lfs -text
- *.gz filter=lfs diff=lfs merge=lfs -text
- *.h5 filter=lfs diff=lfs merge=lfs -text
- *.joblib filter=lfs diff=lfs merge=lfs -text
- *.lfs.* filter=lfs diff=lfs merge=lfs -text
- *.mlmodel filter=lfs diff=lfs merge=lfs -text
- *.model filter=lfs diff=lfs merge=lfs -text
- *.msgpack filter=lfs diff=lfs merge=lfs -text
- *.npy filter=lfs diff=lfs merge=lfs -text
- *.npz filter=lfs diff=lfs merge=lfs -text
- *.onnx filter=lfs diff=lfs merge=lfs -text
- *.ot filter=lfs diff=lfs merge=lfs -text
- *.parquet filter=lfs diff=lfs merge=lfs -text
- *.pb filter=lfs diff=lfs merge=lfs -text
- *.pickle filter=lfs diff=lfs merge=lfs -text
- *.pkl filter=lfs diff=lfs merge=lfs -text
- *.pt filter=lfs diff=lfs merge=lfs -text
- *.pth filter=lfs diff=lfs merge=lfs -text
- *.rar filter=lfs diff=lfs merge=lfs -text
- *.safetensors filter=lfs diff=lfs merge=lfs -text
- saved_model/**/* filter=lfs diff=lfs merge=lfs -text
- *.tar.* filter=lfs diff=lfs merge=lfs -text
- *.tar filter=lfs diff=lfs merge=lfs -text
- *.tflite filter=lfs diff=lfs merge=lfs -text
- *.tgz filter=lfs diff=lfs merge=lfs -text
- *.wasm filter=lfs diff=lfs merge=lfs -text
- *.xz filter=lfs diff=lfs merge=lfs -text
- *.zip filter=lfs diff=lfs merge=lfs -text
- *.zst filter=lfs diff=lfs merge=lfs -text
- *tfevents* filter=lfs diff=lfs merge=lfs -text
+ *.7z filter=lfs diff=lfs merge=lfs -text
+ *.arrow filter=lfs diff=lfs merge=lfs -text
+ *.bin filter=lfs diff=lfs merge=lfs -text
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
+ *.ftz filter=lfs diff=lfs merge=lfs -text
+ *.gz filter=lfs diff=lfs merge=lfs -text
+ *.h5 filter=lfs diff=lfs merge=lfs -text
+ *.joblib filter=lfs diff=lfs merge=lfs -text
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
+ *.model filter=lfs diff=lfs merge=lfs -text
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
+ *.npy filter=lfs diff=lfs merge=lfs -text
+ *.npz filter=lfs diff=lfs merge=lfs -text
+ *.onnx filter=lfs diff=lfs merge=lfs -text
+ *.ot filter=lfs diff=lfs merge=lfs -text
+ *.parquet filter=lfs diff=lfs merge=lfs -text
+ *.pb filter=lfs diff=lfs merge=lfs -text
+ *.pickle filter=lfs diff=lfs merge=lfs -text
+ *.pkl filter=lfs diff=lfs merge=lfs -text
+ *.pt filter=lfs diff=lfs merge=lfs -text
+ *.pth filter=lfs diff=lfs merge=lfs -text
+ *.rar filter=lfs diff=lfs merge=lfs -text
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
+ *.tar filter=lfs diff=lfs merge=lfs -text
+ *.tflite filter=lfs diff=lfs merge=lfs -text
+ *.tgz filter=lfs diff=lfs merge=lfs -text
+ *.wasm filter=lfs diff=lfs merge=lfs -text
+ *.xz filter=lfs diff=lfs merge=lfs -text
+ *.zip filter=lfs diff=lfs merge=lfs -text
+ *.zst filter=lfs diff=lfs merge=lfs -text
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
Dockerfile CHANGED
@@ -1,28 +1,28 @@
- FROM python:3.10-slim
-
- # Install essential packages only
- RUN apt-get update && apt-get install -y \
-     build-essential pkg-config curl \
-     && rm -rf /var/lib/apt/lists/*
-
- # Add a non-root user
- RUN useradd -m -u 1000 user
- WORKDIR /home/user/app
- COPY --chown=user . .
-
- # Cache directory for the model
- RUN mkdir -p /home/user/app/data && chown -R user:user /home/user/app/data
-
- # Create the virtual environment
- RUN python -m venv /home/user/venv
- ENV PATH="/home/user/venv/bin:$PATH"
-
-
- # Install the requirements
- RUN pip install --upgrade pip
- RUN pip install --no-cache-dir -r requirements.txt
-
- USER user
-
- CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
- EXPOSE 7860
+ FROM python:3.10-slim
+
+ # Install essential packages only
+ RUN apt-get update && apt-get install -y \
+     build-essential pkg-config curl \
+     && rm -rf /var/lib/apt/lists/*
+
+ # Add a non-root user
+ RUN useradd -m -u 1000 user
+ WORKDIR /home/user/app
+ COPY --chown=user . .
+
+ # Cache directory for the model
+ RUN mkdir -p /home/user/app/data && chown -R user:user /home/user/app/data
+
+ # Create the virtual environment
+ RUN python -m venv /home/user/venv
+ ENV PATH="/home/user/venv/bin:$PATH"
+
+
+ # Install the requirements
+ RUN pip install --upgrade pip
+ RUN pip install --no-cache-dir -r requirements.txt
+
+ USER user
+
+ CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
+ EXPOSE 7860
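
The Dockerfile creates /home/user/app/data as a writable model cache directory but does not itself point any cache at it. A minimal sketch of how the application could route downloads there, assuming the Hugging Face cache is the intended target (the env-var choice is an illustration, not taken from this repository):

    import os

    # Hypothetical: route Hugging Face downloads (e.g. the GGUF file fetched by
    # Llama.from_pretrained) into the writable data directory from the Dockerfile.
    os.environ.setdefault("HF_HOME", "/home/user/app/data")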
README.md CHANGED
@@ -1,13 +1,13 @@
1
- ---
2
- title: AI Development Assistant
3
- emoji: 🏢
4
- colorFrom: green
5
- colorTo: indigo
6
- sdk: docker
7
- pinned: false
8
- license: apache-2.0
9
- short_description: Multi-level programmer with simple interface
10
- app_port: 7860
11
- ---
12
-
13
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
1
+ ---
2
+ title: AI Development Assistant
3
+ emoji: 🏢
4
+ colorFrom: green
5
+ colorTo: indigo
6
+ sdk: docker
7
+ pinned: false
8
+ license: apache-2.0
9
+ short_description: Multi-level programmer with simple interface
10
+ app_port: 7860
11
+ ---
12
+
13
+ Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
app.py CHANGED
@@ -121,11 +121,11 @@ def load_local_model_if_configured():
         logger.warning("⚠️ llama_cpp is not available. The local model will not run.")
         return
 
-     try:
+     try:
         logger.info(f"⬇️ Loading GGUF model: {LOCAL_GGUF_REPO}/{LOCAL_GGUF_FILE}")
         llm = Llama.from_pretrained(
-             repo_id=LOCAL_GGUF_REPO,
-             filename=LOCAL_GGUF_FILE,
+             repo_id=LOCAL_GGUF_REPO,
+             filename=LOCAL_GGUF_FILE,
             # Llama params
             n_ctx=int(os.getenv("N_CTX", "32768")),
             n_threads=int(os.getenv("N_THREADS", "2")),
@@ -212,7 +212,7 @@ def call_hf_inference(prompt: str, max_new_tokens: int = 900) -> str:
     raise RuntimeError("HF Inference is disabled. Only the local model is used.")
 
 def call_llm(prompt: str, max_tokens: int = 900) -> str:
-     return call_local_llm(prompt, max_tokens)
+     return call_local_llm(prompt, max_tokens)
 
 # =========================
 # Building the chat prompt (simplified version)
@@ -442,10 +442,10 @@ def classify_intent(history: List[List[str]], message: str) -> Dict[str, Any]:
 def chat(req: ChatRequest):
     history = get_history(req.session_id)
     prompt = build_chat_prompt(history, req.message, "")
-     try:
-         response_text = call_llm(prompt, max_tokens=700)
-     except Exception as e:
-         raise HTTPException(status_code=500, detail=f"LLM error: {str(e)}")
+     try:
+         response_text = call_llm(prompt, max_tokens=700)
+     except Exception as e:
+         raise HTTPException(status_code=500, detail=f"LLM error: {str(e)}")
 
     updated = (history + [[req.message, response_text]])[-8:]
     save_history(req.session_id, updated)
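
For context, the hunks above configure llama-cpp-python's Llama.from_pretrained with environment-driven parameters. A minimal standalone sketch of the same loading pattern (the repo and file names are placeholders, and the completion call is generic llama-cpp-python usage, not necessarily identical to call_local_llm):

    import os
    from llama_cpp import Llama

    # Download a GGUF file from the Hub and load it; n_ctx / n_threads come from
    # the environment with the same defaults used in app.py.
    llm = Llama.from_pretrained(
        repo_id=os.getenv("LOCAL_GGUF_REPO", "some-org/some-model-GGUF"),  # placeholder
        filename=os.getenv("LOCAL_GGUF_FILE", "model-q4_k_m.gguf"),        # placeholder
        n_ctx=int(os.getenv("N_CTX", "32768")),
        n_threads=int(os.getenv("N_THREADS", "2")),
    )

    out = llm("Q: What does this service do?\nA:", max_tokens=64)
    print(out["choices"][0]["text"])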
docker-compose.yml CHANGED
@@ -1,20 +1,20 @@
1
- version: '3.9'
2
-
3
- services:
4
- ai-assistant:
5
- build:
6
- context: .
7
- dockerfile: Dockerfile
8
- container_name: ai-dev-assistant
9
- ports:
10
- - "7860:7860"
11
- volumes:
12
- - ./data:/home/user/app/data
13
- environment:
14
- - HF_TOKEN=${HF_TOKEN}
15
- restart: unless-stopped
16
- healthcheck:
17
- test: ["CMD", "curl", "-f", "http://localhost:7860/"]
18
- interval: 30s
19
- timeout: 20s
20
  retries: 5
 
1
+ version: '3.9'
2
+
3
+ services:
4
+ ai-assistant:
5
+ build:
6
+ context: .
7
+ dockerfile: Dockerfile
8
+ container_name: ai-dev-assistant
9
+ ports:
10
+ - "7860:7860"
11
+ volumes:
12
+ - ./data:/home/user/app/data
13
+ environment:
14
+ - HF_TOKEN=${HF_TOKEN}
15
+ restart: unless-stopped
16
+ healthcheck:
17
+ test: ["CMD", "curl", "-f", "http://localhost:7860/"]
18
+ interval: 30s
19
+ timeout: 20s
20
  retries: 5
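
The compose healthcheck curls http://localhost:7860/, which assumes the FastAPI app answers on the root path. A hypothetical sketch of a root endpoint that would satisfy it (app.py's actual handler is not shown in this diff):

    from fastapi import FastAPI

    app = FastAPI()

    # Hypothetical root endpoint: returns 200 so the compose healthcheck
    # (`curl -f http://localhost:7860/`) succeeds.
    @app.get("/")
    def root():
        return {"status": "ok"}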
main.py CHANGED
@@ -1,26 +1,26 @@
1
- import logging
2
- import os
3
-
4
- # إعداد السجل قبل الاستيرادات الأخرى
5
- logging.basicConfig(
6
- level=logging.DEBUG,
7
- format="🪵 [%(asctime)s] [%(levelname)s] %(message)s",
8
- handlers=[
9
- logging.StreamHandler(),
10
- logging.FileHandler("data/app.log")
11
- ]
12
- )
13
- logger = logging.getLogger(__name__)
14
-
15
- # إنشاء مجلد data إذا لم يكن موجوداً
16
- data_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "data")
17
- os.makedirs(data_dir, exist_ok=True)
18
- logger.info(f"📁 مجلد البيانات جاهز: {data_dir}")
19
-
20
- try:
21
- from app import app
22
- logger.info("✅ استيراد app من app.py ناجح")
23
-
24
- except ImportError as e:
25
- logger.error(f"❌ فشل استيراد app من app.py: {str(e)}")
26
  raise
 
1
+ import logging
2
+ import os
3
+
4
+ # إعداد السجل قبل الاستيرادات الأخرى
5
+ logging.basicConfig(
6
+ level=logging.DEBUG,
7
+ format="🪵 [%(asctime)s] [%(levelname)s] %(message)s",
8
+ handlers=[
9
+ logging.StreamHandler(),
10
+ logging.FileHandler("data/app.log")
11
+ ]
12
+ )
13
+ logger = logging.getLogger(__name__)
14
+
15
+ # إنشاء مجلد data إذا لم يكن موجوداً
16
+ data_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "data")
17
+ os.makedirs(data_dir, exist_ok=True)
18
+ logger.info(f"📁 مجلد البيانات جاهز: {data_dir}")
19
+
20
+ try:
21
+ from app import app
22
+ logger.info("✅ استيراد app من app.py ناجح")
23
+
24
+ except ImportError as e:
25
+ logger.error(f"❌ فشل استيراد app من app.py: {str(e)}")
26
  raise
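
Note that main.py attaches logging.FileHandler("data/app.log") before it creates the data directory; FileHandler raises FileNotFoundError when the parent directory is missing, so this only works where data/ already exists (the Dockerfile pre-creates it inside the container). A sketch of the safer ordering, as an illustration rather than part of this commit:

    import logging
    import os

    # Ensure the log directory exists before attaching a FileHandler to it;
    # logging.FileHandler fails if "data/" is missing when main.py is imported.
    data_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "data")
    os.makedirs(data_dir, exist_ok=True)

    logging.basicConfig(
        level=logging.DEBUG,
        format="🪵 [%(asctime)s] [%(levelname)s] %(message)s",
        handlers=[
            logging.StreamHandler(),
            logging.FileHandler(os.path.join(data_dir, "app.log")),
        ],
    )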
requirements.txt CHANGED
@@ -5,4 +5,4 @@ psutil==5.9.8
 aiosqlite==0.20.0
 python-multipart==0.0.9
 transformers==4.42.4
- llama-cpp-python --config-settings "cmake.args=-DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=OpenBLAS"
+ llama-cpp-python==0.2.84
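
This change pins llama-cpp-python to 0.2.84 and drops the per-requirement OpenBLAS build flags. Those flags can still be supplied at install time through the CMAKE_ARGS environment variable that llama-cpp-python's build reads; a sketch driving pip from Python with the same flags the old requirements line used (the exact flag names depend on the llama.cpp version bundled with the release, and newer builds renamed them to GGML_BLAS / GGML_BLAS_VENDOR):

    import os
    import subprocess
    import sys

    # Illustration only: pass the OpenBLAS CMake flags removed from
    # requirements.txt at install time instead.
    env = dict(os.environ, CMAKE_ARGS="-DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=OpenBLAS")
    subprocess.check_call(
        [sys.executable, "-m", "pip", "install", "--no-cache-dir", "llama-cpp-python==0.2.84"],
        env=env,
    )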