KOyuki-0129 committed on
Commit
9b33a0e
·
verified ·
1 Parent(s): 99cf7cf

Upload 4 files

Browse files
Files changed (4) hide show
  1. Dockerfile +18 -0
  2. docker-compose.yml +35 -0
  3. requirements.txt +5 -0
  4. 日次RA.py +181 -0
Dockerfile ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
FROM python:3.10-slim

WORKDIR /app

# curl is required by the HEALTHCHECK below; python:3.10-slim does not ship it,
# so without this the healthcheck would always fail.
RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Install dependencies first so Docker layer caching survives source edits.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

COPY . .

EXPOSE 7860

HEALTHCHECK CMD curl --fail http://localhost:7860/_stcore/health || exit 1

ENTRYPOINT ["streamlit", "run", "日次RA.py", "--server.port=7860", "--server.address=0.0.0.0"]
docker-compose.yml ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Local development stack: MariaDB + Laravel (Bitnami images).
# NOTE(review): the Streamlit app (日次RA.py) is not defined as a service here,
# and the database name below (bitnami_webapp) differs from the one the Python
# app connects to (ra_db) — confirm which application this compose file serves.
version: '3.8'

services:
  mariadb:
    image: bitnami/mariadb:latest
    container_name: mariadb_test
    # NOTE(review): credentials are hard-coded for local testing only;
    # move them to an .env file / secrets before any shared deployment.
    environment:
      - MARIADB_ROOT_PASSWORD=rootpassword
      - MARIADB_DATABASE=bitnami_webapp
      - MARIADB_USER=user
      - MARIADB_PASSWORD=pass
    ports:
      - "3306:3306"
    volumes:
      # Persist database files across container recreation.
      - mariadb_data:/bitnami/mariadb

  laravel:
    image: bitnami/laravel:latest
    container_name: laravel_test
    # DB_* values must match the mariadb service credentials above.
    environment:
      - DB_CONNECTION=mysql
      - DB_HOST=mariadb
      - DB_PORT=3306
      - DB_DATABASE=bitnami_webapp
      - DB_USERNAME=user
      - DB_PASSWORD=pass
    ports:
      - "8000:8000"
    depends_on:
      - mariadb
    volumes:
      # Mount the application source from the host for live editing.
      - ./web_app:/app

volumes:
  mariadb_data:
requirements.txt ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
streamlit
pymysql
pandas
sentence-transformers
sudachipy
# SudachiPy needs a dictionary package at runtime:
# dictionary.Dictionary() looks up sudachidict_core by default.
sudachidict_core
日次RA.py ADDED
@@ -0,0 +1,181 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import json
import os
import re
from datetime import datetime

import pandas as pd
import pymysql
import streamlit as st
from sentence_transformers import SentenceTransformer, util
from sudachipy import dictionary, tokenizer
# ============================
# 1. Configuration
# ============================

# Database connection (module-level; opened once at import time).
# Settings come from the environment so the same code runs both locally and
# inside Docker, where the DB host is a service name rather than localhost.
# The defaults reproduce the original hard-coded values, so existing local
# setups keep working unchanged.
# NOTE(review): the default credentials are for development only — supply
# real ones via environment variables / secrets in production.
conn = pymysql.connect(
    host=os.getenv("DB_HOST", "localhost"),
    port=int(os.getenv("DB_PORT", "3306")),
    user=os.getenv("DB_USER", "user"),
    password=os.getenv("DB_PASSWORD", "pass"),
    db=os.getenv("DB_NAME", "ra_db"),
    charset="utf8mb4",
    autocommit=True,
)
cur = conn.cursor()
# SudachiPy setup: the tokenizer object is created once at import time and
# reused by every call below.
sudachi_tokenizer = dictionary.Dictionary().create()


def sudachi_tokenizer_func(text):
    """Return the list of surface forms of *text* using Sudachi split mode C."""
    split_mode = tokenizer.Tokenizer.SplitMode.C
    return [morpheme.surface() for morpheme in sudachi_tokenizer.tokenize(text, split_mode)]
# SentenceTransformer model.
# NOTE(review): loaded at import time but not referenced anywhere else in this
# file — presumably reserved for future semantic-similarity matching (util is
# imported but also unused); confirm before removing.
model = SentenceTransformer("all-MiniLM-L12-v2")

# Normalization dictionary: canonical term -> list of surface variants
# that should be rewritten to it (e.g. "ユンボ" -> "重機").
NORMALIZE = {
    "重機": ["ショベルカー", "ユンボ", "バックホウ", "グレーダー"],
    "作業員": ["作業者", "職人", "人"],
    "クレーン": ["クレーン車", "吊り上げ機"],
    "足場": ["仮設足場", "高所足場"],
    "吊荷": ["荷", "吊り荷", "吊下げ物"]
}

# Classification keywords: on-site objects, and hazard/risk words, detected by
# plain substring matching in extract_relations.
OBJECTS = ["作業員", "重機", "クレーン", "吊荷", "足場", "ダンプ"]
RISKS = ["挟まれ", "接触", "墜落", "転倒", "感電", "落下", "衝突"]

# Known hazardous object pairs -> human-readable latent-risk description.
# Lookups try both orderings of the pair (see generate_rules).
POTENTIAL_RISKS = {
    ("作業員", "重機"): "作業員と重機が近接している状態",
    ("作業員", "足場"): "作業員が高所作業中の可能性",
    ("クレーン", "吊荷"): "吊荷の下に人がいる可能性",
    ("作業員", "吊荷"): "作業員が吊荷の下にいる可能性",
}
# ============================
# 2. Functions
# ============================

def normalize_text(text, mapping=None):
    """Normalize surface variants in *text* to their canonical terms.

    Args:
        text: Input string possibly containing variant spellings.
        mapping: Optional ``{canonical: [variants, ...]}`` dict; defaults
            to the module-level NORMALIZE table.

    Returns:
        The text with every variant replaced by its canonical term.

    The original sequential ``str.replace`` loop corrupted overlapping
    variants: replacing "荷" before "吊り荷" turned "吊り荷" into
    "吊り吊荷", and canonical terms containing a shorter variant (e.g.
    "荷" inside "吊荷") were re-replaced. A single regex pass with
    longest-first alternation fixes both, because each input position is
    matched once against the longest applicable term and replacements are
    never rescanned.
    """
    if mapping is None:
        mapping = NORMALIZE
    # variant -> canonical; canonical terms map to themselves so they are
    # matched (and left unchanged) instead of being partially re-replaced.
    repl = {w: base for base, words in mapping.items() for w in words}
    repl.update({base: base for base in mapping})
    if not repl:
        return text
    pattern = "|".join(
        re.escape(term) for term in sorted(repl, key=len, reverse=True)
    )
    return re.sub(pattern, lambda m: repl[m.group(0)], text)
def extract_relations(text):
    """Extract simple (object1, object2, risks) triples from a sentence.

    The text is normalized first; known objects and risk words are detected
    by substring matching, and every unordered pair of detected objects is
    emitted together with the full list of detected risk words — but only
    when at least two objects AND at least one risk word are present.
    """
    normalized = normalize_text(text)

    objects_found = [obj for obj in OBJECTS if obj in normalized]
    risks_found = [risk for risk in RISKS if risk in normalized]

    # Pairs are produced only for multi-object sentences that mention a risk.
    if len(objects_found) < 2 or not risks_found:
        return []

    return [
        (first, second, risks_found)
        for idx, first in enumerate(objects_found)
        for second in objects_found[idx + 1:]
    ]
def generate_rules(data):
    """Rule-based generation: derive risk rules from one form submission.

    Args:
        data: dict holding the free-text fields ``work_content``,
            ``hazard_points``, ``risk_identification`` and
            ``mitigation_measures``.

    Returns:
        list of ``{"object1", "object2", "risk"}`` dicts, where ``risk`` is
        a description string from POTENTIAL_RISKS or ``[]`` when the pair
        has no registered latent risk.
    """
    combined = normalize_text(" ".join((
        data["work_content"],
        data["hazard_points"],
        data["risk_identification"],
        data["mitigation_measures"],
    )))

    # Syntactic relation extraction (extract_relations normalizes again;
    # the redundancy is kept for behavior parity with the original code).
    rules = []
    for first, second, _risk_words in extract_relations(combined):
        # Latent-risk lookup tries both orderings of the pair.
        description = (
            POTENTIAL_RISKS.get((first, second))
            or POTENTIAL_RISKS.get((second, first))
            or []
        )
        rules.append({
            "object1": first,
            "object2": second,
            "risk": description,
        })
    return rules
# ============================
# 3. Streamlit UI
# ============================

st.title("日次RA入力")

# Daily risk-assessment input form.
with st.form("ra_form"):
    work_date = st.date_input("作業日")                        # work date
    work_content = st.text_area("作業内容")                    # work content
    hazard_points = st.text_area("作業危険ポイント")            # hazard points
    general_comments = st.text_area("元請コメント")             # prime-contractor comments
    risk_identification = st.text_area("危険性・有害性の特定")   # risk identification
    mitigation_measures = st.text_area("危険性・有害性の低減策") # mitigation measures
    inspection_items = st.text_area("点検事項")                 # inspection items

    submitted = st.form_submit_button("保存")

# ============================
# Post-submit processing
# ============================
if submitted:
    # --- Collect the form inputs into one dict ---
    # NOTE: the key order here must stay aligned with the column order of
    # the INSERT below, because tuple(form_data.values()) is bound
    # positionally to the placeholders.
    form_data = {
        "work_date": str(work_date),
        "work_content": work_content,
        "hazard_points": hazard_points,
        "general_comments": general_comments,
        "risk_identification": risk_identification,
        "mitigation_measures": mitigation_measures,
        "inspection_items": inspection_items
    }

    # --- Rule generation ---
    rules = generate_rules(form_data)

    # --- Save to MySQL ---
    # NOTE(review): relies on the module-level conn/cur opened at import;
    # a dropped connection is not retried — confirm that is acceptable here.
    sql = """INSERT INTO daily_ra
    (work_date, work_content, hazard_points, general_comments, risk_identification, mitigation_measures, inspection_items, created_at)
    VALUES (%s,%s,%s,%s,%s,%s,%s,NOW())"""
    cur.execute(sql, tuple(form_data.values()))
    daily_id = cur.lastrowid  # id of the daily_ra row just inserted

    for r in rules:
        # risk may be a description string or [] — json.dumps handles both.
        sql_rule = """INSERT INTO rule_base (daily_ra_id, object1, object2, risk, created_at)
        VALUES (%s,%s,%s,%s,NOW())"""
        cur.execute(sql_rule, (daily_id, r["object1"], r["object2"], json.dumps(r["risk"], ensure_ascii=False)))

    # Redundant because the connection was opened with autocommit=True,
    # but harmless.
    conn.commit()
    st.success("✅ 入力内容とルールベースの生成・保存が完了しました!")

    # --- Show the generated rules as a table ---
    if rules:
        df = pd.DataFrame(rules)
        st.subheader("🔍 生成されたルール(テーブル形式)")
        st.dataframe(df)

    # --- Build JSON (for LLM integration) and persist it ---
    json_data = {
        "daily_id": daily_id,
        "rules": rules
    }

    # Create the output directory for JSON files (idempotent).
    json_dir = "json_data"
    os.makedirs(json_dir, exist_ok=True)

    # File name carries the daily_id and a timestamp to stay unique.
    json_path = os.path.join(json_dir, f"daily_ra_{daily_id}_{datetime.now().strftime('%Y%m%d_%H%M%S')}.json")

    # Write the JSON file (UTF-8, human-readable, Japanese left unescaped).
    with open(json_path, "w", encoding="utf-8") as f:
        json.dump(json_data, f, ensure_ascii=False, indent=2)

    st.success(f"✅ JSONファイルを保存しました: {json_path}")