drzg committed on
Commit
3945dbb
·
1 Parent(s): efad76a

updating all

Browse files
package-lock.json ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ {
2
+ "name": "app",
3
+ "lockfileVersion": 3,
4
+ "requires": true,
5
+ "packages": {}
6
+ }
pinch_tool/.gitignore ADDED
@@ -0,0 +1,376 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Created by https://www.toptal.com/developers/gitignore/api/python,visualstudiocode,pycharm,microsoftoffice,windows,git,jupyternotebooks
2
+ # Edit at https://www.toptal.com/developers/gitignore?templates=python,visualstudiocode,pycharm,microsoftoffice,windows,git,jupyternotebooks
3
+
4
+ ### Git ###
5
+ # Created by git for backups. To disable backups in Git:
6
+ # $ git config --global mergetool.keepBackup false
7
+ *.orig
8
+
9
+ # Created by git when using merge tools for conflicts
10
+ *.BACKUP.*
11
+ *.BASE.*
12
+ *.LOCAL.*
13
+ *.REMOTE.*
14
+ *_BACKUP_*.txt
15
+ *_BASE_*.txt
16
+ *_LOCAL_*.txt
17
+ *_REMOTE_*.txt
18
+
19
+ ### JupyterNotebooks ###
20
+ # gitignore template for Jupyter Notebooks
21
+ # website: http://jupyter.org/
22
+
23
+ .ipynb_checkpoints
24
+ */.ipynb_checkpoints/*
25
+
26
+ # IPython
27
+ profile_default/
28
+ ipython_config.py
29
+
30
+ # Remove previous ipynb_checkpoints
31
+ # git rm -r .ipynb_checkpoints/
32
+
33
+ ### MicrosoftOffice ###
34
+ *.tmp
35
+
36
+ # Word temporary
37
+ ~$*.doc*
38
+
39
+ # Word Auto Backup File
40
+ Backup of *.doc*
41
+
42
+ # Excel temporary
43
+ ~$*.xls*
44
+
45
+ # Excel Backup File
46
+ *.xlk
47
+
48
+ # PowerPoint temporary
49
+ ~$*.ppt*
50
+
51
+ # Visio autosave temporary files
52
+ *.~vsd*
53
+
54
+ Buffer file for TotalSiteProfile creation.csv
55
+
56
+ ### PyCharm ###
57
+ # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
58
+ # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
59
+
60
+ # User-specific stuff
61
+ .idea/**/workspace.xml
62
+ .idea/**/tasks.xml
63
+ .idea/**/usage.statistics.xml
64
+ .idea/**/dictionaries
65
+ .idea/**/shelf
66
+
67
+ # AWS User-specific
68
+ .idea/**/aws.xml
69
+
70
+ # Generated files
71
+ .idea/**/contentModel.xml
72
+
73
+ # Sensitive or high-churn files
74
+ .idea/**/dataSources/
75
+ .idea/**/dataSources.ids
76
+ .idea/**/dataSources.local.xml
77
+ .idea/**/sqlDataSources.xml
78
+ .idea/**/dynamic.xml
79
+ .idea/**/uiDesigner.xml
80
+ .idea/**/dbnavigator.xml
81
+
82
+ # Gradle
83
+ .idea/**/gradle.xml
84
+ .idea/**/libraries
85
+
86
+ # Gradle and Maven with auto-import
87
+ # When using Gradle or Maven with auto-import, you should exclude module files,
88
+ # since they will be recreated, and may cause churn. Uncomment if using
89
+ # auto-import.
90
+ # .idea/artifacts
91
+ # .idea/compiler.xml
92
+ # .idea/jarRepositories.xml
93
+ # .idea/modules.xml
94
+ # .idea/*.iml
95
+ # .idea/modules
96
+ # *.iml
97
+ # *.ipr
98
+
99
+ # CMake
100
+ cmake-build-*/
101
+
102
+ # Mongo Explorer plugin
103
+ .idea/**/mongoSettings.xml
104
+
105
+ # File-based project format
106
+ *.iws
107
+
108
+ # IntelliJ
109
+ out/
110
+
111
+ # mpeltonen/sbt-idea plugin
112
+ .idea_modules/
113
+
114
+ # JIRA plugin
115
+ atlassian-ide-plugin.xml
116
+
117
+ # Cursive Clojure plugin
118
+ .idea/replstate.xml
119
+
120
+ # SonarLint plugin
121
+ .idea/sonarlint/
122
+
123
+ # Crashlytics plugin (for Android Studio and IntelliJ)
124
+ com_crashlytics_export_strings.xml
125
+ crashlytics.properties
126
+ crashlytics-build.properties
127
+ fabric.properties
128
+
129
+ # Editor-based Rest Client
130
+ .idea/httpRequests
131
+
132
+ # Android studio 3.1+ serialized cache file
133
+ .idea/caches/build_file_checksums.ser
134
+
135
+ ### PyCharm Patch ###
136
+ # Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721
137
+
138
+ # *.iml
139
+ # modules.xml
140
+ # .idea/misc.xml
141
+ # *.ipr
142
+
143
+ # Sonarlint plugin
144
+ # https://plugins.jetbrains.com/plugin/7973-sonarlint
145
+ .idea/**/sonarlint/
146
+
147
+ # SonarQube Plugin
148
+ # https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin
149
+ .idea/**/sonarIssues.xml
150
+
151
+ # Markdown Navigator plugin
152
+ # https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced
153
+ .idea/**/markdown-navigator.xml
154
+ .idea/**/markdown-navigator-enh.xml
155
+ .idea/**/markdown-navigator/
156
+
157
+ # Cache file creation bug
158
+ # See https://youtrack.jetbrains.com/issue/JBR-2257
159
+ .idea/$CACHE_FILE$
160
+
161
+ # CodeStream plugin
162
+ # https://plugins.jetbrains.com/plugin/12206-codestream
163
+ .idea/codestream.xml
164
+
165
+ # Azure Toolkit for IntelliJ plugin
166
+ # https://plugins.jetbrains.com/plugin/8053-azure-toolkit-for-intellij
167
+ .idea/**/azureSettings.xml
168
+
169
+ ### Python ###
170
+ # Byte-compiled / optimized / DLL files
171
+ __pycache__/
172
+ *.py[cod]
173
+ *$py.class
174
+
175
+ # C extensions
176
+ *.so
177
+
178
+ # Distribution / packaging
179
+ .Python
180
+ build/
181
+ develop-eggs/
182
+ dist/
183
+ downloads/
184
+ eggs/
185
+ .eggs/
186
+ lib/
187
+ lib64/
188
+ parts/
189
+ sdist/
190
+ var/
191
+ wheels/
192
+ share/python-wheels/
193
+ *.egg-info/
194
+ .installed.cfg
195
+ *.egg
196
+ MANIFEST
197
+
198
+ # PyInstaller
199
+ # Usually these files are written by a python script from a template
200
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
201
+ *.manifest
202
+ *.spec
203
+
204
+ # Installer logs
205
+ pip-log.txt
206
+ pip-delete-this-directory.txt
207
+
208
+ # Unit test / coverage reports
209
+ htmlcov/
210
+ .tox/
211
+ .nox/
212
+ .coverage
213
+ .coverage.*
214
+ .cache
215
+ nosetests.xml
216
+ coverage.xml
217
+ *.cover
218
+ *.py,cover
219
+ .hypothesis/
220
+ .pytest_cache/
221
+ cover/
222
+
223
+ # Translations
224
+ *.mo
225
+ *.pot
226
+
227
+ # Django stuff:
228
+ *.log
229
+ local_settings.py
230
+ db.sqlite3
231
+ db.sqlite3-journal
232
+
233
+ # Flask stuff:
234
+ instance/
235
+ .webassets-cache
236
+
237
+ # Scrapy stuff:
238
+ .scrapy
239
+
240
+ # Sphinx documentation
241
+ docs/_build/
242
+
243
+ # PyBuilder
244
+ .pybuilder/
245
+ target/
246
+
247
+ # Jupyter Notebook
248
+
249
+ # IPython
250
+
251
+ # pyenv
252
+ # For a library or package, you might want to ignore these files since the code is
253
+ # intended to run in multiple environments; otherwise, check them in:
254
+ # .python-version
255
+
256
+ # pipenv
257
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
258
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
259
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
260
+ # install all needed dependencies.
261
+ #Pipfile.lock
262
+
263
+ # poetry
264
+ # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
265
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
266
+ # commonly ignored for libraries.
267
+ # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
268
+ #poetry.lock
269
+
270
+ # pdm
271
+ # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
272
+ #pdm.lock
273
+ # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
274
+ # in version control.
275
+ # https://pdm.fming.dev/#use-with-ide
276
+ .pdm.toml
277
+
278
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
279
+ __pypackages__/
280
+
281
+ # Celery stuff
282
+ celerybeat-schedule
283
+ celerybeat.pid
284
+
285
+ # SageMath parsed files
286
+ *.sage.py
287
+
288
+ # Environments
289
+ .env
290
+ .venv
291
+ env/
292
+ venv/
293
+ ENV/
294
+ env.bak/
295
+ venv.bak/
296
+
297
+ # Spyder project settings
298
+ .spyderproject
299
+ .spyproject
300
+
301
+ # Rope project settings
302
+ .ropeproject
303
+
304
+ # mkdocs documentation
305
+ /site
306
+
307
+ # mypy
308
+ .mypy_cache/
309
+ .dmypy.json
310
+ dmypy.json
311
+
312
+ # Pyre type checker
313
+ .pyre/
314
+
315
+ # pytype static type analyzer
316
+ .pytype/
317
+
318
+ # Cython debug symbols
319
+ cython_debug/
320
+
321
+ # PyCharm
322
+ # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
323
+ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
324
+ # and can be added to the global gitignore or merged into this file. For a more nuclear
325
+ # option (not recommended) you can uncomment the following to ignore the entire idea folder.
326
+ #.idea/
327
+
328
+ ### Python Patch ###
329
+ # Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
330
+ poetry.toml
331
+
332
+ # ruff
333
+ .ruff_cache/
334
+
335
+ # LSP config files
336
+ pyrightconfig.json
337
+
338
+ ### VisualStudioCode ###
339
+ .vscode/*
340
+
341
+ # Local History for Visual Studio Code
342
+ .history/
343
+
344
+ # Built Visual Studio Code Extensions
345
+ *.vsix
346
+
347
+ ### VisualStudioCode Patch ###
348
+ # Ignore all local history of files
349
+ .history
350
+ .ionide
351
+
352
+ ### Windows ###
353
+ # Windows thumbnail cache files
354
+ Thumbs.db
355
+ Thumbs.db:encryptable
356
+ ehthumbs.db
357
+ ehthumbs_vista.db
358
+
359
+ # Dump file
360
+ *.stackdump
361
+
362
+ # Folder config file
363
+ [Dd]esktop.ini
364
+
365
+ # Recycle Bin used on file shares
366
+ $RECYCLE.BIN/
367
+
368
+ # Windows Installer files
369
+ *.cab
370
+ *.msi
371
+ *.msix
372
+ *.msm
373
+ *.msp
374
+
375
+ # Windows shortcuts
376
+ *.lnk
pinch_tool/.gitlab-ci.yml ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ include:
2
+ - project: 'est/est-repo-templates/gitlab-ci-templates'
3
+ file: 'python/python-default.gitlab-ci.yml'
pinch_tool/HPI_main.py ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from Pinch_main import Pinchmain
2
+ from Modules.HeatPumpIntegration.HeatPumpIntegration import HeatPumpIntegration as HPI
3
+ import matplotlib.pyplot as plt
4
+
5
class HPImain():
    """Thin driver that wires a Pinch analysis into a heat-pump-integration run."""

    def __init__(self, streamsDataFile, TS = None):
        # A single Pinch run feeds the HPI module; TS=None lets HPI pick the
        # sink outlet temperature iteratively.
        pinch_run = Pinchmain(streamsDataFile, options={})
        self.pyPinchHPI = pinch_run
        self.HPI = HPI(streamsDataFile, TS, pinch_run)

    def showHPI(self):
        """Run the heat-pump integration and display the resulting figures."""
        self.HPI.HPI()
        plt.show()
12
# Guard the demo run so importing this module does not trigger a full
# analysis and open plot windows as a side effect.
if __name__ == '__main__':
    HPImain('Example.csv').showHPI()
pinch_tool/ISSP_main.py ADDED
@@ -0,0 +1,22 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from Pinch_main import Pinchmain
2
+ from Modules.ISSP.ISSP import ISSP
3
+ from Modules.HeatPumpIntegration.HeatPumpIntegration import HeatPumpIntegration as HPI
4
+ import matplotlib.pyplot as plt
5
+
6
class ISSPmain():
    """Driver wiring two Pinch runs, the heat-pump-integration (HPI) module and
    the ISSP (intermediate-circuit storage sizing) module together.

    Parameters
    ----------
    streamsDataFile : str
        CSV file with the stream data.
    TS : number
        Storage/sink temperature handed to HPI and ISSP.
    TProcess : number
        Process temperature handed to ISSP.
    batchtimeSeconds : number
        Batch duration in seconds (ISSP converts kW curves to kWh with it).
    fromPinch : bool-like, keyword
        Passed through to ISSP as a boolean.
    intermediateCircuit : optional
        Truthiness is forwarded to ISSP. Default changed from the mutable
        ``{}`` to ``None`` — ``bool(None) == bool({}) == False``, so behavior
        is unchanged while avoiding the mutable-default-argument pitfall.
    """

    def __init__(self, streamsDataFile, TS, TProcess, batchtimeSeconds, fromPinch = False, intermediateCircuit = None):
        # Two independent Pinch runs: one consumed by ISSP, one by HPI.
        self.pyPinchISSP = Pinchmain(streamsDataFile, options= {})
        self.pyPinchHPI = Pinchmain(streamsDataFile, options= {})
        self.HPI = HPI(streamsDataFile, TS, self.pyPinchHPI)
        self.fromPinch = bool(fromPinch)
        self.intermediateCircuit = bool(intermediateCircuit)
        self.ISSP = ISSP(streamsDataFile, TS, TProcess, batchtimeSeconds, self.pyPinchISSP, self.HPI, self.fromPinch, self.intermediateCircuit)

    def solveISSP(self):
        """Convert the composite curves to kWh and draw both intermediate-circuit plots."""
        self.ISSP.CCinkWh()
        self.ISSP.drawISSPHotIntermediate()
        self.ISSP.drawISSPColdIntermediate()
        #self.HPI.drawGrandCompositeCurve()
        plt.show()
21
# Guard the demo run so importing this module does not trigger a full
# analysis and open plot windows as a side effect.
#ISSPmain('Prozess_neu.csv', 200, 170, batchtimeSeconds= 9240, fromPinch=False).solveISSP()
if __name__ == '__main__':
    ISSPmain('Example.csv', 143, 113, batchtimeSeconds= 1000, fromPinch=False).solveISSP()
pinch_tool/InputCSVs/.gitignore ADDED
@@ -0,0 +1 @@
 
 
1
+ *.csv
pinch_tool/InputCSVs/.gitkeep ADDED
File without changes
pinch_tool/Modules/HeatPumpIntegration/HPIPlot.py ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from matplotlib import pyplot as plt
2
+
3
class HPIPlot():
    """Matplotlib renderings of the heat-pump-integration (HPI) results.

    All series arrive pre-computed from HeatPumpIntegration: condenser duties
    (KoWP), evaporator duties (EvWP), COP values (COPWerte), the matching
    temperatures (COPT) and the grand composite curve (GCCdraw, a dict with
    'H' and 'T' point lists). Labels/titles are German; the original English
    wording is kept in the trailing comments.
    """
    def __init__(self, processdesignation, Tsinkout, pyPinch, EvWP, KoWP, COPWerte, COPT, GCCdraw, _temperatures, COPRegression):
        # Process name (CSV file name without extension) used in figure titles.
        self.processdesignation = processdesignation
        self.grandCompositeCurve = GCCdraw
        self.KoWP = KoWP
        self.EvWP = EvWP
        self.COPWerte = COPWerte
        self.COPT = COPT
        self.Tsinkout = Tsinkout
        # pyPinch must expose .heatCascade (list of {'deltaH': ...}) — see drawGrandCompositeCurve.
        self.pyPinch = pyPinch
        self._temperatures = _temperatures
        # Name of the COP correlation that produced the final point (shown as subtitle).
        self.COPRegression = COPRegression
    def drawCOPKo(self):
        """Plot COP over condenser duty, thinning the curve to every 3rd point."""
        self.x = []
        self.y = []
        fig1 = plt.figure()
        for i in self.KoWP[::3]:
            self.x.append(i)
        for i in self.COPWerte[::3]:
            self.y.append(i)
        plt.plot(self.x,self.y)
        plt.grid(True)
        plt.title('COP gegen Qpunkt Ko')#plt.title('Grand Composite Curve')
        plt.xlabel('Qpunkt Ko [kW]')#plt.xlabel('Net Enthalpy Change ∆H (kW)')
        plt.ylabel('COP [-]')#plt.ylabel('Shifted Temperature S (degC)')

    def drawGrandCompositeCurve(self):
        """Draw the grand composite curve segment by segment (red = heat
        surplus, blue = deficit) and mark the heat-pump integration points
        with green triangles.

        NOTE(review): plt.close('all') below also closes the figure produced
        by drawCOPKo() if it was drawn first — confirm this is intended.
        """
        grandCompositeCurve = self.grandCompositeCurve
        Tempplus = 0  # NOTE(review): unused
        self.heatCascade = self.pyPinch.heatCascade
        plt.close('all')
        fig = plt.figure()
        # First segment: colour keyed on the sign of the first cascade interval.
        if self.heatCascade[0]['deltaH'] > 0:
            plt.plot([grandCompositeCurve['H'][0],grandCompositeCurve['H'][1]], [self.grandCompositeCurve['T'][0],self.grandCompositeCurve['T'][1]], 'tab:red')
            plt.plot([grandCompositeCurve['H'][0],grandCompositeCurve['H'][1]], [self.grandCompositeCurve['T'][0],self.grandCompositeCurve['T'][1]], 'ro')
        elif self.heatCascade[0]['deltaH'] < 0:
            plt.plot([grandCompositeCurve['H'][0],grandCompositeCurve['H'][1]], [self.grandCompositeCurve['T'][0],self.grandCompositeCurve['T'][1]], 'tab:blue')
            plt.plot([grandCompositeCurve['H'][0],grandCompositeCurve['H'][1]], [grandCompositeCurve['T'][0],grandCompositeCurve['T'][1]], 'bo')

        # Remaining segments: one line + marker pair per temperature interval.
        for i in range(1, len(self._temperatures)-1):
            if self.heatCascade[i]['deltaH'] > 0:
                plt.plot([grandCompositeCurve['H'][i],grandCompositeCurve['H'][i+1]], [grandCompositeCurve['T'][i],grandCompositeCurve['T'][i+1]], 'tab:red')
                plt.plot([grandCompositeCurve['H'][i],grandCompositeCurve['H'][i+1]], [grandCompositeCurve['T'][i],grandCompositeCurve['T'][i+1]], 'ro')
            elif self.heatCascade[i]['deltaH'] < 0:
                plt.plot([grandCompositeCurve['H'][i],grandCompositeCurve['H'][i+1]], [grandCompositeCurve['T'][i],grandCompositeCurve['T'][i+1]], 'tab:blue')
                plt.plot([grandCompositeCurve['H'][i],grandCompositeCurve['H'][i+1]], [grandCompositeCurve['T'][i],grandCompositeCurve['T'][i+1]], 'bo')
            # Zero-duty interval away from H=0 is drawn in blue as well.
            elif self.heatCascade[i]['deltaH'] == 0 and grandCompositeCurve['H'][i]!=0:
                plt.plot([grandCompositeCurve['H'][i],grandCompositeCurve['H'][i+1]], [grandCompositeCurve['T'][i],grandCompositeCurve['T'][i+1]], 'tab:blue')
                plt.plot([grandCompositeCurve['H'][i],grandCompositeCurve['H'][i+1]], [grandCompositeCurve['T'][i],grandCompositeCurve['T'][i+1]], 'bo')

        # Final integration point: evaporator (EvWP) and condenser (KoWP) markers,
        # with the last COP value annotated next to the condenser marker.
        plt.plot(self.EvWP[-1],self.COPT[-1],'g^')
        plt.plot(self.KoWP[-1],self.Tsinkout,'g^')
        plt.text(0.94*self.KoWP[-1],0.93*self.Tsinkout,round(self.COPWerte[-1],2))
        plt.grid(True)
        name = self.processdesignation
        plt.suptitle('Großverbundkurve {} °C ({})'.format(round(self.Tsinkout,1),name))#plt.title('Grand Composite Curve')
        plt.title(self.COPRegression)
        plt.xlabel('Nettoenthalpiestromänderung ∆H in kW')#plt.xlabel('Net Enthalpy Change ∆H (kW)')
        plt.ylabel('Verschobene Temperatur in °C')#plt.ylabel('Shifted Temperature S (degC)')
pinch_tool/Modules/HeatPumpIntegration/HeatPumpIntegration.py ADDED
@@ -0,0 +1,319 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from tabulate import tabulate
2
+ from Modules.HeatPumpIntegration.HPIPlot import HPIPlot
3
+ from Modules.Utility.TemperaturePocketDeletion import TemperaturePocketDeletion as TPD
4
+
5
class HeatPumpIntegration():
    """Heat-pump integration (HPI) study on top of a Pinch analysis.

    The grand composite curve (GCC) is stripped of its temperature pockets
    and split into a heat-source branch (GCCSource) and a heat-sink branch
    (GCCSink). The evaporator temperature is then stepped down the source
    curve; at each step the COP and the evaporator/condenser duties are
    recorded until the sink can no longer absorb the condenser heat.

    German-variable glossary (kept for project continuity): Quelle = source,
    Senke = sink, Ev = evaporator, Ko = condenser, SchrittweiteTemp =
    temperature step width, COPwerte = COP values.
    """
    def __init__(self,streamsDataFile, Tsinkout, pyPinch):
        # With no given sink outlet temperature, it is determined iteratively
        # from the sink curve (mode string kept as-is, including the typo).
        self.Integrationtype = None
        if Tsinkout == None:
            self.Integrationtype = 'Itterativ'
        # Process name = CSV file name without its 4-character extension.
        self.processdesignation = streamsDataFile[:-4]
        self.streamsDataFile = streamsDataFile
        self.options = {'draw', 'debug'}
        self.KoWP = []       # condenser duty per step
        self.EvWP = []       # evaporator duty per step
        self.COPwerte = []   # COP per step
        self.COPT = []       # evaporator-side temperature per step
        self.SchrittweiteTemp = 0.05   # initial temperature step width
        self.Tsinkout = Tsinkout

        self.pyPinch = pyPinch

    def COP(self,T):
        """Best achievable COP at source temperature T for the fixed sink
        outlet temperature self.Tsinkout.

        Each regression below is valid only inside its sink-temperature and
        temperature-lift window; outside all windows a Carnot estimate with
        0.5 efficiency is used. Returns (max COP, name of the correlation).
        """
        COPList = []
        StringList = []
        if 144 <= self.Tsinkout <= 212 and 25 <= self.Tsinkout-T <= 190: #Prototypical Stirling
            COPList.append(1.28792 * ((self.Tsinkout-(T)) + 2 * 0.54103)**(-0.37606) * (self.Tsinkout+273 + 0.54103)**0.35992)
            StringList.append('Prototypical Stirling')
        if 80 <= self.Tsinkout <= 160 and 25 <= self.Tsinkout-T <= 95: #VHTHP (HFC/HFO)
            COPList.append(1.9118 * ((self.Tsinkout-(T)) + 2 * 0.04419)**(-0.89094) * (self.Tsinkout+273 + 0.04419)**0.67895)
            StringList.append('VHTHP (HFC/HFO)')
        if 25 <= self.Tsinkout <= 100 and 10 <= self.Tsinkout-T <= 78: #SHP and HTHPs (HFC/HFO)
            COPList.append(1.4480*(10**12) * ((self.Tsinkout-(T)) + 2 * 88.73)**(-4.9469))
            StringList.append('SHP and HTHPs (HFC/HFO)')
        if 70 <= self.Tsinkout <= 85 and 30 <= self.Tsinkout-T <= 75: #SHP and HTHPs (R717)
            COPList.append(40.789 * ((self.Tsinkout-(T)) + 2 * 1.0305)**(-1.0489) * (self.Tsinkout+273 + 1.0305)**0.29998)
            StringList.append('SHP and HTHPs (R717)')
        if len(COPList) == 0:
            COPList.append((self.Tsinkout+273.15)/(self.Tsinkout-T)*0.5) # Carnot
            StringList.append('Carnot')
        return max(COPList),StringList[COPList.index(max(COPList))]

    def get_available_heat_pumps(self, T):
        """Returns list of all heat pump types with their COPs and availability status"""
        # Same correlations and validity windows as COP(), but every type is
        # reported, with a human-readable reason when it is out of range.
        hp_list = []
        delta_T = self.Tsinkout - T

        # Prototypical Stirling
        if 144 <= self.Tsinkout <= 212 and 25 <= delta_T <= 190:
            cop = 1.28792 * ((self.Tsinkout-(T)) + 2 * 0.54103)**(-0.37606) * (self.Tsinkout+273 + 0.54103)**0.35992
            hp_list.append({'name': 'Prototypical Stirling', 'cop': cop, 'available': True, 'reason': ''})
        else:
            hp_list.append({'name': 'Prototypical Stirling', 'cop': None, 'available': False,
                            'reason': f'Requires: 144°C≤T_sink≤212°C, 25°C≤ΔT≤190°C (Current: T_sink={self.Tsinkout:.1f}°C, ΔT={delta_T:.1f}°C)'})

        # VHTHP (HFC/HFO)
        if 80 <= self.Tsinkout <= 160 and 25 <= delta_T <= 95:
            cop = 1.9118 * ((self.Tsinkout-(T)) + 2 * 0.04419)**(-0.89094) * (self.Tsinkout+273 + 0.04419)**0.67895
            hp_list.append({'name': 'VHTHP (HFC/HFO)', 'cop': cop, 'available': True, 'reason': ''})
        else:
            hp_list.append({'name': 'VHTHP (HFC/HFO)', 'cop': None, 'available': False,
                            'reason': f'Requires: 80°C≤T_sink≤160°C, 25°C≤ΔT≤95°C (Current: T_sink={self.Tsinkout:.1f}°C, ΔT={delta_T:.1f}°C)'})

        # SHP and HTHPs (HFC/HFO)
        if 25 <= self.Tsinkout <= 100 and 10 <= delta_T <= 78:
            cop = 1.4480*(10**12) * ((self.Tsinkout-(T)) + 2 * 88.73)**(-4.9469)
            hp_list.append({'name': 'SHP and HTHPs (HFC/HFO)', 'cop': cop, 'available': True, 'reason': ''})
        else:
            hp_list.append({'name': 'SHP and HTHPs (HFC/HFO)', 'cop': None, 'available': False,
                            'reason': f'Requires: 25°C≤T_sink≤100°C, 10°C≤ΔT≤78°C (Current: T_sink={self.Tsinkout:.1f}°C, ΔT={delta_T:.1f}°C)'})

        # SHP and HTHPs (R717)
        if 70 <= self.Tsinkout <= 85 and 30 <= delta_T <= 75:
            cop = 40.789 * ((self.Tsinkout-(T)) + 2 * 1.0305)**(-1.0489) * (self.Tsinkout+273 + 1.0305)**0.29998
            hp_list.append({'name': 'SHP and HTHPs (R717)', 'cop': cop, 'available': True, 'reason': ''})
        else:
            hp_list.append({'name': 'SHP and HTHPs (R717)', 'cop': None, 'available': False,
                            'reason': f'Requires: 70°C≤T_sink≤85°C, 30°C≤ΔT≤75°C (Current: T_sink={self.Tsinkout:.1f}°C, ΔT={delta_T:.1f}°C)'})

        # Carnot (always available)
        cop_carnot = (self.Tsinkout+273.15)/(self.Tsinkout-T)*0.5
        hp_list.append({'name': 'Carnot', 'cop': cop_carnot, 'available': True, 'reason': ''})

        return hp_list

    def COP_specific(self, T, hp_type):
        """Calculate COP for a specific heat pump type"""
        # Falls through to the Carnot estimate when hp_type is unknown or its
        # validity window is not met at (self.Tsinkout, T).
        if hp_type == 'Prototypical Stirling':
            if 144 <= self.Tsinkout <= 212 and 25 <= self.Tsinkout-T <= 190:
                return 1.28792 * ((self.Tsinkout-(T)) + 2 * 0.54103)**(-0.37606) * (self.Tsinkout+273 + 0.54103)**0.35992
        elif hp_type == 'VHTHP (HFC/HFO)':
            if 80 <= self.Tsinkout <= 160 and 25 <= self.Tsinkout-T <= 95:
                return 1.9118 * ((self.Tsinkout-(T)) + 2 * 0.04419)**(-0.89094) * (self.Tsinkout+273 + 0.04419)**0.67895
        elif hp_type == 'SHP and HTHPs (HFC/HFO)':
            if 25 <= self.Tsinkout <= 100 and 10 <= self.Tsinkout-T <= 78:
                return 1.4480*(10**12) * ((self.Tsinkout-(T)) + 2 * 88.73)**(-4.9469)
        elif hp_type == 'SHP and HTHPs (R717)':
            if 70 <= self.Tsinkout <= 85 and 30 <= self.Tsinkout-T <= 75:
                return 40.789 * ((self.Tsinkout-(T)) + 2 * 1.0305)**(-1.0489) * (self.Tsinkout+273 + 1.0305)**0.29998
        # Fallback to Carnot
        return (self.Tsinkout+273.15)/(self.Tsinkout-T)*0.5

    def deleteTemperaturePockets(self):
        """Strip temperature pockets from the GCC via the TPD utility.

        NOTE(review): rebinds self.pyPinch to its PinchAnalyse sub-object —
        after this call the original pyPinch wrapper is no longer reachable
        from self. Returns None; the result is kept in self.deletedPocketdict.
        """
        self.pyPinch = self.pyPinch.PinchAnalyse
        self.hotUtility = self.pyPinch.hotUtility
        self.heatCascade = self.pyPinch.heatCascade
        self._temperatures = self.pyPinch._temperatures
        self.deletedPocketdict = TPD(self.hotUtility, self.heatCascade, self._temperatures).deleteTemperaturePockets()

    def splitHotandCold(self):
        """Split the pocket-free GCC into a source branch (deltaH > 0) and a
        sink branch (deltaH < 0).

        The testHot/testCold flags mark whether the first segment of each
        branch was already emitted (the first segment contributes two points,
        later ones only their end point). Assumes deletedPocketdict values are
        lists of lists indexed with [0] — confirm against the TPD utility.
        """
        self.splitHotTemperatures = []
        self.splitColdTemperatures = []
        self.splitHotH = []
        self.splitColdH = []
        testHot = 0
        testCold = 0

        for i in range(len(self.deletedPocketdict['T'][0])):
            # deltaH has one entry fewer than T (interval vs. node lists).
            if i >= len(self.deletedPocketdict['deltaH'][0]):
                continue
            if self.deletedPocketdict['deltaH'][0][i] > 0 and testHot == 0:
                self.splitHotTemperatures.append(self.deletedPocketdict['T'][0][i])
                self.splitHotH.append(self.deletedPocketdict['H'][0][i])
                self.splitHotTemperatures.append(self.deletedPocketdict['T'][0][i+1])
                self.splitHotH.append(self.deletedPocketdict['H'][0][i+1])
                testHot = 1

            elif self.deletedPocketdict['deltaH'][0][i] > 0 and testHot == 1:
                self.splitHotTemperatures.append(self.deletedPocketdict['T'][0][i+1])
                self.splitHotH.append(self.deletedPocketdict['H'][0][i+1])

            elif self.deletedPocketdict['deltaH'][0][i] < 0 and testCold == 0:
                self.splitColdTemperatures.append(self.deletedPocketdict['T'][0][i])
                self.splitColdH.append(self.deletedPocketdict['H'][0][i])
                self.splitColdTemperatures.append(self.deletedPocketdict['T'][0][i+1])
                self.splitColdH.append(self.deletedPocketdict['H'][0][i+1])
                testCold = 1
            elif self.deletedPocketdict['deltaH'][0][i] < 0 and testCold == 1:
                self.splitColdTemperatures.append(self.deletedPocketdict['T'][0][i+1])
                self.splitColdH.append(self.deletedPocketdict['H'][0][i+1])
            elif self.deletedPocketdict['deltaH'][0][i] == 0:
                # Zero-duty interval: attach its end point to whichever branch
                # the previous interval belonged to.
                if self.deletedPocketdict['deltaH'][0][i-1] < 0:
                    self.splitColdTemperatures.append(self.deletedPocketdict['T'][0][i+1])
                    self.splitColdH.append(self.deletedPocketdict['H'][0][i+1])

                elif self.deletedPocketdict['deltaH'][0][i-1] > 0:
                    self.splitHotTemperatures.append(self.deletedPocketdict['T'][0][i+1])
                    self.splitHotH.append(self.deletedPocketdict['H'][0][i+1])
                else:
                    pass

            else:
                pass

        # Re-derive the per-interval enthalpy differences for each branch.
        self.splitColddeltaH = []
        self.splitHotdeltaH = []
        for i in range(len(self.splitColdH)-1):
            self.splitColddeltaH.append(self.splitColdH[i+1]-self.splitColdH[i])

        for i in range(len(self.splitHotH)-1):
            self.splitHotdeltaH.append(self.splitHotH[i+1]-self.splitHotH[i])

        return {'H':self.splitHotH, 'T':self.splitHotTemperatures, 'deltaH':self.splitHotdeltaH},{'H':self.splitColdH, 'T':self.splitColdTemperatures, 'deltaH':self.splitColddeltaH}

    def QpunktEv(self,T,Quelle):
        # FIXME: flagged as faulty by the author (original comment: "FEHLER").
        # Linear interpolation of the evaporator duty on source segment
        # [Quelle, Quelle+1] at temperature T.
        return self.GCCSource['H'][Quelle] + ((self.GCCSource['H'][Quelle+1]-self.GCCSource['H'][Quelle])/(self.GCCSource['T'][Quelle+1]-self.GCCSource['T'][Quelle])) * (T-self.GCCSource['T'][Quelle])

    def QpunktKo(self,T,Quelle):
        # Linear interpolation of the condenser duty on sink segment
        # [Quelle-1, Quelle] at temperature T.
        return self.GCCSink['H'][Quelle-1] + ((self.GCCSink['H'][Quelle-1]-self.GCCSink['H'][Quelle])/(self.GCCSink['T'][Quelle-1]-self.GCCSink['T'][Quelle])) * (T-self.GCCSink['T'][Quelle-1])

    def TKo(self,H,Quelle):
        # Inverse interpolation: sink temperature at condenser duty H on
        # sink segment [Quelle, Quelle+1].
        return self.GCCSink['T'][Quelle] - ((self.GCCSink['T'][Quelle]-self.GCCSink['T'][Quelle+1])/(self.GCCSink['H'][Quelle]-self.GCCSink['H'][Quelle+1])) * (self.GCCSink['H'][Quelle]-H)

    def IntegrateHeatPump(self):
        """Step the evaporator temperature T down the source curve, recording
        COP and duties, until the condenser duty exceeds what the sink side
        can absorb.

        Test / TSTest act as one-shot flags: on first overshoot the step is
        reversed and refined by a factor of 200; the second overshoot ends
        the loop. Requires self.GCCSource / self.GCCSink (set by
        splitHotandCold).
        """
        Test = 0
        TSTest = 0

        # Start temperature
        if self.Integrationtype == 'Itterativ':
            self.Tsinkout = self.GCCSink['T'][0]
        else:
            pass
        Quelle = 0
        # Step width: a tenth of the current source segment's temperature span.
        self.SchrittweiteTemp = (self.GCCSource['T'][Quelle] - self.GCCSource['T'][Quelle+1])/10
        T = self.GCCSource['T'][Quelle]-self.SchrittweiteTemp

        while T > self.GCCSource['T'][-1]:
            # Advance to the next source segment when T drops past its end.
            if T <= self.GCCSource['T'][Quelle+1]:
                Quelle +=1
                self.SchrittweiteTemp = (self.GCCSource['T'][Quelle] - self.GCCSource['T'][Quelle+1])/10
                T = self.GCCSource['T'][Quelle]-self.SchrittweiteTemp
            # Skip zero-duty segments entirely.
            if self.GCCSource['deltaH'][Quelle] == 0.0:
                Quelle +=1
                self.SchrittweiteTemp = (self.GCCSource['T'][Quelle] - self.GCCSource['T'][Quelle+1])/10
                T = self.GCCSource['T'][Quelle]-self.SchrittweiteTemp
                if T < self.GCCSource['T'][Quelle+1]:
                    T = self.GCCSource['T'][Quelle+1]
            COP = self.COP(T)
            QpunktEv = self.QpunktEv(T,Quelle)
            # Condenser duty from evaporator duty: Q_Ko = Q_Ev / (1 - 1/COP).
            QpunktKo = QpunktEv * ((1-(1/COP[0]))**(-1))
            self.COPwerte.append(round(COP[0],3))
            self.EvWP.append(round(QpunktEv))
            self.KoWP.append(round(QpunktKo))
            self.COPT.append(T)
            # Locate the sink segment containing the sink outlet temperature.
            for i in range(len(self.GCCSink['T'])):
                if self.GCCSink['T'][i] <= self.Tsinkout:
                    KoQuelle = i
                    break
            # Fixed-sink mode: stop (or refine once) when the condenser duty
            # exceeds the sink demand at Tsinkout.
            if QpunktKo >= self.QpunktKo(self.Tsinkout, KoQuelle) and self.Integrationtype == None and TSTest == 1 and self.Tsinkout < self.GCCSink['T'][0]:
                break
            if QpunktKo >= self.QpunktKo(self.Tsinkout, KoQuelle) and self.Integrationtype == None and TSTest == 0:
                if self.Tsinkout <= self.GCCSink['T'][0]:
                    T+=self.SchrittweiteTemp
                    self.SchrittweiteTemp = self.SchrittweiteTemp/200
                    TSTest = 1
                else:
                    break
            # Overall sink limit: same refine-once-then-stop pattern.
            if QpunktKo >= self.GCCSink['H'][0] and Test == 0:
                T+=self.SchrittweiteTemp
                self.SchrittweiteTemp = self.SchrittweiteTemp/200
                Test = 1
            elif QpunktKo >= self.GCCSink['H'][0] and Test == 1:
                break
            T-=self.SchrittweiteTemp
            if T < self.GCCSource['T'][Quelle+1]:
                T = self.GCCSource['T'][Quelle+1]

        # Loop ran off the bottom of the source curve: evaluate the end point.
        if T <= self.GCCSource['T'][-1]:
            T = self.GCCSource['T'][-1]
            COP = self.COP(T)
            QpunktEv = self.GCCSource['H'][-1]
            QpunktKo = QpunktEv * (1-(1/COP[0]))**(-1)
            if self.Integrationtype == 'Itterativ':
                # Iterate Tsinkout until it is consistent (within 1 K) with
                # the condenser duty it implies.
                for i in range(len(self.GCCSink['H'])):
                    if QpunktKo >= self.GCCSink['H'][i]:
                        QuelleSenke = i-1
                        break
                self.Tsinkout = self.TKo(QpunktKo,QuelleSenke)
                COP = self.COP(T)
                QpunktKo = QpunktEv * (1-(1/COP[0]))**(-1)
                TSinktest = self.TKo(QpunktKo, QuelleSenke)
                while abs(self.Tsinkout - TSinktest) >= 1:
                    for i in range(len(self.GCCSink['H'])):
                        if QpunktKo >= self.GCCSink['H'][i]:
                            QuelleSenke = i-1
                            break
                    self.Tsinkout = self.TKo(QpunktKo,QuelleSenke)
                    COP = self.COP(T)
                    QpunktKo = QpunktEv * (1-(1/COP[0]))**(-1)
                    TSinktest = self.TKo(QpunktKo, QuelleSenke)

            self.COPwerte.append(COP[0])
            self.EvWP.append(round(QpunktEv))
            self.KoWP.append(round(QpunktKo))
            self.COPT.append(T)
        self.COPRegression = COP[1]
        # Console summary: every 30th step, plus the final point.
        table = {'COP':self.COPwerte[::30],'QQuelle':self.EvWP[::30],'QSenke':self.KoWP[::30]}
        self.tableISSP = {'Temp': self.COPT, 'COP':self.COPwerte,'QQuelle':self.EvWP,'QSenke':self.KoWP}
        print(tabulate(table,headers='keys'))
        print({'COP':self.COPwerte[-1],'QQuelle':self.EvWP[-1],'QSenke':self.KoWP[-1]})

    def findIntegration(self):
        # TODO: implement exact integration (original note:
        # "Genaue Integration implementieren!").
        # Pick the first step whose sink duty reaches the GCC's hot-utility
        # demand; otherwise fall back to the last step.
        self.IntegrationPoint = {'Temp': [], 'COP':[],'QQuelle':[],'QSenke':[]}
        for i in range(len(self.tableISSP['QSenke'])):
            if self.tableISSP['QSenke'][i] >= self.GCCdraw['H'][0]:
                self.IntegrationPoint['Temp'].append(self.tableISSP['Temp'][0]+self.SchrittweiteTemp)
                self.IntegrationPoint['Temp'].append(self.tableISSP['Temp'][i])
                self.IntegrationPoint['COP'].append(self.tableISSP['COP'][i])
                self.IntegrationPoint['QQuelle'].append(self.tableISSP['QQuelle'][i])
                #self.IntegrationPoint['QQuelle'].append(self.tableISSP['QQuelle'][self.tableISSP['Temp'].index(self.IntegrationPoint['Temp'][-1])])
                self.IntegrationPoint['QSenke'].append(self.tableISSP['QSenke'][i])
                break
            else:
                self.IntegrationPoint['Temp'].append(self.tableISSP['Temp'][0]+self.SchrittweiteTemp)
                self.IntegrationPoint['Temp'].append(self.tableISSP['Temp'][-1])
                self.IntegrationPoint['COP'].append(self.tableISSP['COP'][-1])
                self.IntegrationPoint['QQuelle'].append(self.tableISSP['QQuelle'][-1])
                #self.IntegrationPoint['QQuelle'].append(self.tableISSP['QQuelle'][self.tableISSP['Temp'].index(self.IntegrationPoint['Temp'][-1])])
                self.IntegrationPoint['QSenke'].append(self.tableISSP['QSenke'][-1])
                break

    def IntegrateHeatPump_specific(self, hp_type):
        """Same as IntegrateHeatPump but uses specific heat pump type"""
        self.selected_hp_type = hp_type
        # Store original COP method
        original_COP = self.COP
        # Replace COP method temporarily
        def COP_wrapper(T):
            cop_val = self.COP_specific(T, hp_type)
            return (cop_val, hp_type) if cop_val else original_COP(T)
        self.COP = COP_wrapper
        # Run integration
        self.IntegrateHeatPump()
        # Restore original method
        self.COP = original_COP


    def solveforISSP(self):
        """Run the integration for the ISSP module and return the integration point.

        NOTE(review): deleteTemperaturePockets() returns None, so self.GCC is
        always None here; also splitHotandCold() is not called, so GCCSource/
        GCCSink must already exist (e.g. from a prior HPI() run) — confirm.
        """
        self.GCCdraw = self.pyPinch.solvePinchforHPI().grandCompositeCurve
        self.GCC = self.deleteTemperaturePockets()
        self.IntegrateHeatPump()
        self.findIntegration()
        return self.IntegrationPoint

    def HPI(self):
        """Full heat-pump-integration run: pocket deletion, source/sink split,
        integration sweep and both result plots.

        Note: deleteTemperaturePockets() rebinds self.pyPinch, so the
        HPIPlot instances below receive the PinchAnalyse object.
        """
        self.GCCdraw = self.pyPinch.solvePinchforHPI().grandCompositeCurve
        Temperaturesdraw = []
        for i in self.pyPinch.PinchAnalyse._temperatures:
            Temperaturesdraw.append(i)
        self.deleteTemperaturePockets()
        self.GCCSource, self.GCCSink = self.splitHotandCold()
        self.IntegrateHeatPump()
        self.findIntegration()
        HPIPlot(self.streamsDataFile[:-4],self.Tsinkout,self.pyPinch,self.EvWP,self.KoWP,
                self.COPwerte,self.COPT,self.GCCdraw, Temperaturesdraw, self.COPRegression).drawCOPKo()
        HPIPlot(self.streamsDataFile[:-4],self.Tsinkout,self.pyPinch,self.EvWP,self.KoWP,
                self.COPwerte,self.COPT,self.GCCdraw, Temperaturesdraw, self.COPRegression).drawGrandCompositeCurve()
pinch_tool/Modules/ISSP/ISSP.py ADDED
@@ -0,0 +1,142 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from matplotlib import pyplot as plt
2
+ from Modules.Utility.Thermodynamic_Properties import ThermodynamicProperties as Props
3
+
4
class ISSP():
    """Storage integration around a previously computed heat-pump
    integration point for a batch process.

    Combines the heat-pump integration result (``HPI.solveforISSP()``) with
    the batch composite curves to draw and size two storage concepts: a
    stratified thermal-energy storage on the source side
    (``drawISSPHotIntermediate``) and a steam accumulator (RSA) on the sink
    side (``drawISSPColdIntermediate``) -- see the plot titles.
    """

    def __init__(self, streamsDataFile, TS, TProcess, batchtimeSeconds, pyPinch, HPI, fromPinch, intermediateCircuit = {}):
        # NOTE(review): mutable default `intermediateCircuit={}` is only read
        # via bool() below, so the shared-default pitfall is harmless here,
        # but a default of False would be clearer.

        self.processdesignation = streamsDataFile[:-4]  # file name without extension
        self.streamsDataFile = streamsDataFile
        self.options = {'draw', 'debug'}
        self.fromPinch = bool(fromPinch)                      # integrate starting from the pinch?
        self.intermediateCircuit = bool(intermediateCircuit)  # draw the intermediate circuit variant?
        self.TS = TS              # storage/steam temperature (degC) -- used for property lookups
        self.TProcess = TProcess  # process temperature (degC)

        self.IntegrationPoint = HPI.solveforISSP()  # heat-pump integration result
        self.Pinch = pyPinch.solvePinchforISSP()    # (composite curves, solved pinch object)
        self.CC = self.Pinch[0]
        self.pyPinch = self.Pinch[1]

        self.deltaTmin = self.pyPinch.tmin

        # batch duration in hours: converts kW values to kWh per batch
        self.t = batchtimeSeconds/3600

    def CCinkWh(self):
        """Convert both composite curves in place from kW to kWh per batch.

        Multiplies every enthalpy value by the batch duration ``self.t`` and
        renames the 'H' key to 'kWh'. Must be called only once: a second
        call raises KeyError because 'H' has been deleted.
        """
        for i in range(len(self.CC['hot']['H'])):
            self.CC['hot']['H'][i] = self.CC['hot']['H'][i]*self.t
        self.CC['hot']['kWh'] = self.CC['hot']['H']
        del self.CC['hot']['H']

        for i in range(len(self.CC['cold']['H'])):
            self.CC['cold']['H'][i] = self.CC['cold']['H'][i]*self.t
        self.CC['cold']['kWh'] = self.CC['cold']['H']
        del self.CC['cold']['H']

    def ISSPHotIntermediateGerade(self, kWh, Tempdiff):
        # "Gerade" = straight line: temperature on the source-side storage
        # line at enthalpy `kWh`, shifted down by `Tempdiff`. Relies on
        # self.TemperaturKorrektur / self.DifferenzHot being set by
        # drawISSPHotIntermediate first.
        return (self.IntegrationPoint['Temp'][-1]-Tempdiff) + ((self.IntegrationPoint['Temp'][0]-self.TemperaturKorrektur) - (self.IntegrationPoint['Temp'][-1]-Tempdiff))/(self.IntegrationPoint['QQuelle'][0]*self.t) * (kWh-self.DifferenzHot)

    def drawISSPHotIntermediate(self):  # source side ("Verdichter" = compressor)
        """Draw the source-side (evaporator) storage diagram and size the
        stratified hot-water storage (sets ``self.VolumenWWSpeicher``, m^3).
        """
        # Temperature correction of the storage line depends on whether the
        # heat pump is integrated starting at the pinch.
        if self.fromPinch == True:
            self.TemperaturKorrektur = 1.25 * self.deltaTmin
        else:
            self.TemperaturKorrektur = 0.25 * self.deltaTmin
        deltaTZwischenlreislauf0 = 2/4 * self.deltaTmin  # intermediate-circuit deltaT
        # Enthalpy offset: curve total minus the heat-pump source duty per batch.
        self.DifferenzHot = self.CC['hot']['kWh'][-1] - self.IntegrationPoint['QQuelle'][0]*self.t
        self.coldUtility = self.pyPinch.coldUtility*self.t
        self.hotUtility = self.pyPinch.hotUtility*self.t
        self._temperatures = self.pyPinch._temperatures
        self.pinchTemperature = self.pyPinch.pinchTemperature
        m = 0
        # Interpolate the hot-curve temperature m at enthalpy DifferenzHot.
        for i in range(len(self.CC['hot']['T'])):
            if self.CC['hot']['T'][i] >= self.IntegrationPoint['Temp'][-1]:

                # NOTE(review): bare except hides IndexError/ZeroDivisionError
                # and leaves m == 0; narrow it once the failure mode is known.
                try: m = self.CC['hot']['T'][i-1] + (self.CC['hot']['T'][i] - self.CC['hot']['T'][i-1])/(self.CC['hot']['kWh'][i] - self.CC['hot']['kWh'][i-1]) * (self.DifferenzHot-self.CC['hot']['kWh'][i-1])

                except: print('error m') #m = self.CC['hot']['T'][1] + (self.CC['hot']['T'][i+1] - self.CC['hot']['T'][i])/(self.CC['hot']['kWh'][i+1] - self.CC['hot']['kWh'][i]) * (self.IntegrationPoint['QQuelle'][0]*self.t - self.CC['hot']['kWh'][i])
                if m != 0:
                    break
        if float(self.DifferenzHot) == 0.0 and float(self.CC['hot']['kWh'][-2]) == 0.0:
            ZwischenkreislaufTemp = self.CC['hot']['T'][-1] - deltaTZwischenlreislauf0
        else:
            ZwischenkreislaufTemp = (self.CC['hot']['T'][-2] - deltaTZwischenlreislauf0) + (self.CC['hot']['T'][-2] - m) / (self.CC['hot']['kWh'][-2] - self.DifferenzHot) * (self.CC['hot']['kWh'][-1] -self.CC['hot']['kWh'][-2])
            self.TemperaturKorrektur = (self.CC['hot']['T'][-2] - self.TemperaturKorrektur) + (self.CC['hot']['T'][-2] - m) / (self.CC['hot']['kWh'][-2] - self.DifferenzHot) * (self.CC['hot']['kWh'][-1] -self.CC['hot']['kWh'][-2])

        # Storage volume from Q = V * rho*cp * deltaT (4.18 kJ/(kg K), water).
        self.VolumenWWSpeicher = round(self.IntegrationPoint['QQuelle'][0]*self.t * 3600 / (4.18 * (ZwischenkreislaufTemp-(m-deltaTZwischenlreislauf0)))/1000,1) #m^3
        plt.close('all')
        fig = plt.figure(num='{} Verd'.format(self.processdesignation))
        if self.intermediateCircuit == True:
            # Variant with intermediate circuit: hot composite curve, heat-pump
            # source line and intermediate-circuit line.
            plt.plot(self.CC['hot']['kWh'], self.CC['hot']['T'], 'tab:red')
            plt.plot([self.DifferenzHot,self.IntegrationPoint['QQuelle'][0]*self.t+self.DifferenzHot], [self.IntegrationPoint['Temp'][-1]-1.25*self.deltaTmin,+self.TemperaturKorrektur], 'tab:blue')
            plt.plot([self.DifferenzHot,self.IntegrationPoint['QQuelle'][0]*self.t+self.DifferenzHot], [m-deltaTZwischenlreislauf0,ZwischenkreislaufTemp], 'k')

            #plt.plot(self.CC['hot']['kWh'], self.CC['hot']['T'], 'ro')
            plt.plot([self.DifferenzHot,self.IntegrationPoint['QQuelle'][0]*self.t+self.DifferenzHot], [self.IntegrationPoint['Temp'][-1]-1.25*self.deltaTmin,self.TemperaturKorrektur], 'bo')
            plt.plot([self.DifferenzHot,self.IntegrationPoint['QQuelle'][0]*self.t+self.DifferenzHot],[(m-deltaTZwischenlreislauf0),ZwischenkreislaufTemp],'ko')
        elif self.fromPinch == False:
            # No intermediate circuit, integration not from the pinch.
            plt.plot([0,self.IntegrationPoint['QQuelle'][0]*self.t+self.DifferenzHot], [self._temperatures[-1]+0.5*self.deltaTmin,ZwischenkreislaufTemp-self.deltaTmin], 'tab:red')
            plt.plot([0,self.IntegrationPoint['QQuelle'][0]*self.t+self.DifferenzHot], [self._temperatures[-1]+0.25*self.deltaTmin,self._temperatures[-1]+0.25*self.deltaTmin])#,self.TemperaturKorrektur], 'tab:blue')
            plt.plot([0,self.IntegrationPoint['QQuelle'][0]*self.t+self.DifferenzHot], [self._temperatures[-1]+0.5*self.deltaTmin,ZwischenkreislaufTemp-self.deltaTmin],linestyle = (0, (5, 10)), color = 'black')

            #plt.plot(self.CC['hot']['kWh'], self.CC['hot']['T'], 'ro')
            plt.plot([0,self.IntegrationPoint['QQuelle'][0]*self.t+self.DifferenzHot], [self._temperatures[-1]+0.25*self.deltaTmin,self._temperatures[-1]+0.25*self.deltaTmin], 'bo') #self.TemperaturKorrektur
            plt.plot([0,self.IntegrationPoint['QQuelle'][0]*self.t+self.DifferenzHot],[self._temperatures[-1]+0.5*self.deltaTmin,ZwischenkreislaufTemp-self.deltaTmin],'ko')
        elif self.fromPinch == True:
            # No intermediate circuit, integration starting at the pinch.
            plt.plot([0,self.IntegrationPoint['QQuelle'][0]*self.t+self.DifferenzHot], [m-deltaTZwischenlreislauf0,ZwischenkreislaufTemp], 'tab:red')
            plt.plot([0,self.IntegrationPoint['QQuelle'][0]*self.t+self.DifferenzHot], [self._temperatures[-1]+0.25*self.deltaTmin,self._temperatures[-1]+0.25*self.deltaTmin])#,self.TemperaturKorrektur], 'tab:blue')
            plt.plot([0,self.IntegrationPoint['QQuelle'][0]*self.t+self.DifferenzHot], [m-deltaTZwischenlreislauf0,ZwischenkreislaufTemp],linestyle = (0, (5, 10)), color = 'black')

            #plt.plot(self.CC['hot']['kWh'], self.CC['hot']['T'], 'ro')
            plt.plot([0,self.IntegrationPoint['QQuelle'][0]*self.t+self.DifferenzHot], [self.IntegrationPoint['Temp'][-1]-1.25*self.deltaTmin,self.IntegrationPoint['Temp'][-1]-1.25*self.deltaTmin], 'bo') #self.TemperaturKorrektur
            plt.plot([0,self.IntegrationPoint['QQuelle'][0]*self.t+self.DifferenzHot],[(m-deltaTZwischenlreislauf0),ZwischenkreislaufTemp],'ko')


        plt.grid(True,linewidth=1.5)
        plt.tick_params(axis='both', which='major', labelsize=12)
        plt.title('a) ISSP stratified TES', fontsize=14)
        plt.xlabel('ΔH / Batch in kWh', fontsize=14)
        plt.ylabel('Shifted temperature T in °C', fontsize=14)

    def drawISSPColdIntermediate(self):
        """Draw the sink-side (condenser) storage diagram and size the steam
        accumulator (sets ``self.Dampfmasse`` in kg and
        ``self.VolumenDampfSpeicher`` in m^3).
        """
        deltaTZwischenkreislauf = 3/4 * self.deltaTmin  # intermediate-circuit deltaT
        if self.fromPinch == True:
            Verschiebung = 1.25 * self.deltaTmin   # "Verschiebung" = temperature shift
            Verschiebung2 = 1.25 * self.deltaTmin
        else:
            Verschiebung = 0.25 * self.deltaTmin
            Verschiebung2 = 0.25 * self.deltaTmin
        # Steam mass per batch from the sink duty and the latent heat at TS.
        self.Dampfmasse = (self.IntegrationPoint['QSenke'][0]*self.t * 3600)/Props.get_latentheat(self.TS)
        #vStrich1 = 1/925.014712422 #140 °C
        h1_prime = Props.get_hprime(self.TS)              # sat. liquid enthalpy at TS
        h1_double_prime = Props.get_hdouble_prime(self.TS)  # sat. vapour enthalpy at TS
        v1_prime = Props.get_vprime(self.TS)              # sat. liquid specific volume at TS
        h2_prime = Props.get_hprime(self.TProcess)
        h2_double_prime = Props.get_hdouble_prime(self.TProcess)

        Füllgrad = 0.9  # filling level of the accumulator
        # Accumulator volume per the sliding-pressure steam storage relation.
        self.VolumenDampfSpeicher = round(self.Dampfmasse/ ((Füllgrad/v1_prime)*((h1_prime-h2_prime)/(0.5*(h1_double_prime+h2_double_prime)-h2_prime))),1)
        #http://berndglueck.de/Waermespeicher

        fig = plt.figure(num='{} Kond'.format(self.processdesignation))
        if self.intermediateCircuit == True:
            # Variant with intermediate circuit: heat-pump sink line, cold
            # composite curve and intermediate-circuit line.
            plt.plot([self.CC['cold']['kWh'][-1],self.IntegrationPoint['QSenke'][0]*self.t+self.CC['cold']['kWh'][-1]], [self.CC['cold']['T'][-1]+Verschiebung,self.CC['cold']['T'][0]+Verschiebung], 'tab:red')
            plt.plot(self.CC['cold']['kWh'], self.CC['cold']['T'], 'tab:blue')
            plt.plot([self.CC['cold']['kWh'][-1],self.IntegrationPoint['QSenke'][0]*self.t+self.CC['cold']['kWh'][-1]], [self.CC['cold']['T'][-1]+Verschiebung-deltaTZwischenkreislauf,self.CC['cold']['T'][0]+Verschiebung-deltaTZwischenkreislauf], 'k')

            plt.plot([self.CC['cold']['kWh'][-1],self.IntegrationPoint['QSenke'][0]*self.t+self.CC['cold']['kWh'][-1]], [self.CC['cold']['T'][-1]+Verschiebung,self.CC['cold']['T'][0]+Verschiebung], 'ro')
            plt.plot(self.CC['cold']['kWh'], self.CC['cold']['T'], 'bo')
            plt.plot([self.CC['cold']['kWh'][-1],self.IntegrationPoint['QSenke'][0]*self.t+self.CC['cold']['kWh'][-1]], [self.CC['cold']['T'][-1]+Verschiebung-deltaTZwischenkreislauf,self.CC['cold']['T'][0]+Verschiebung-deltaTZwischenkreislauf], 'ko')
        else:
            plt.plot([0,self.IntegrationPoint['QSenke'][0]*self.t], [self.CC['cold']['T'][0]+Verschiebung2,self.CC['cold']['T'][0]+Verschiebung2], 'tab:red')
            plt.plot([0,self.IntegrationPoint['QSenke'][0]*self.t], [self.CC['cold']['T'][-1]+Verschiebung-deltaTZwischenkreislauf,self.CC['cold']['T'][0]+Verschiebung-deltaTZwischenkreislauf], 'tab:blue')
            plt.plot([0,self.IntegrationPoint['QSenke'][0]*self.t], [self.CC['cold']['T'][-1]+Verschiebung-deltaTZwischenkreislauf,self.CC['cold']['T'][0]+Verschiebung-deltaTZwischenkreislauf],linestyle = (0, (5, 10)), color = 'black')

            plt.plot([0,self.IntegrationPoint['QSenke'][0]*self.t], [self.CC['cold']['T'][0]+Verschiebung2,self.CC['cold']['T'][0]+Verschiebung2], 'ro')
            #plt.plot(self.CC['cold']['kWh'], self.CC['cold']['T'], 'bo')
            plt.plot([0,self.IntegrationPoint['QSenke'][0]*self.t], [self.CC['cold']['T'][-1]+Verschiebung-deltaTZwischenkreislauf,self.CC['cold']['T'][0]+Verschiebung-deltaTZwischenkreislauf], 'ko')

        plt.grid(True,linewidth=1.5)
        plt.tick_params(axis='both', which='major', labelsize=12)
        plt.title('b) ISSP steam RSA', fontsize=14)
        plt.xlabel('ΔH / Batch in kWh', fontsize=14)
        plt.xlim(right = 2500)
        plt.ylabel('Shifted temperature T in °C', fontsize=14)
pinch_tool/Modules/Pinch/Pinch.py ADDED
@@ -0,0 +1,366 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #Based on:
2
+ #!/usr/bin/env python3
3
+ # -*- coding: utf-8 -*-
4
+ # File : PyPinch.py
5
+ # License : License: GNU v3.0
6
+ # Author : Andrei Leonard Nicusan <aln705@student.bham.ac.uk>
7
+ # Date : 25.05.2019
8
+
9
+ import csv
10
+ import os
11
+ import numpy as np
12
+ from Modules.Pinch.Streams import Streams
13
+ from Modules.Pinch.PinchPlot import PinchPlot
14
+ from Modules.Pinch.PinchExport import PinchExport
15
+
16
+
17
class Pinch:
    """Pinch analysis of a stream table (based on PyPinch, A. L. Nicusan).

    Reads the streams from ``streamsDataFile``, shifts temperatures by
    tmin/2, builds the temperature intervals, the problem table, the heat
    cascade (hot/cold utility and pinch temperature) and the shifted/
    unshifted composite diagrams and the grand composite curve. Optional
    'debug' / 'draw' / 'csv' entries in ``options`` enable console output,
    plotting (PinchPlot) and CSV export (PinchExport).
    """

    def __init__(self, streamsDataFile, options = {}):
        # NOTE(review): mutable default `options={}` -- only membership-tested
        # here, so harmless, but frozenset()/None would be safer style.

        self.tmin = 0
        self.streams = []
        self.temperatureInterval = []   # list of {'t1','t2','streamNumbers'}
        self.problemTable = []          # list of {'deltaS','deltaCP','deltaH'}
        self.hotUtility = 0             # minimum hot utility (kW)
        self.coldUtility = 0            # minimum cold utility (kW)
        self.unfeasibleHeatCascade = []
        self.heatCascade = []
        self.pinchTemperature = 0       # shifted pinch temperature (degC)
        self.shiftedCompositeDiagram = {'hot': {'H': [], 'T': []}, 'cold': {'H': [], 'T': []}}
        self.compositeDiagram = {'hot': {'H': [], 'T': []}, 'cold': {'H': [], 'T': []}}
        self.grandCompositeCurve = {'H': [], 'T': []}

        self._temperatures = []         # shifted temperature levels, descending
        self._deltaHHot = []
        self._deltaHCold = []
        self._options = {'debug': False, 'draw': False, 'csv': False}
        self._temperaturesHOT= []
        self._temperaturesCOLD = []
        self.emptyintervalstartHOT= []
        self.emptyintervalstartCOLD = []
        self.lastHotStream = 0
        self.lastColdStream = 0

        self.emptyintervalsHot = {'H': [], 'T': []}
        self.emptyintervalsCold = {'H': [], 'T': []}

        self.processdesignation = streamsDataFile[:-4]  # file name without extension
        path = r"{} Pinch".format(self.processdesignation)
        # NOTE(review): hard-coded backslash makes the output path
        # Windows-only; os.path.join would be portable.
        self.newpath = 'Output'+ '\\' + path

        self.streams = Streams(streamsDataFile)
        self.tmin = self.streams.tmin

        if 'debug' in options:
            self._options['debug'] = True
        if 'draw' in options:
            self._options['draw'] = True
        if 'csv' in options:
            self._options['csv'] = True


    def shiftTemperatures(self):
        """Store shifted temperatures on each stream ('ss'/'st'):
        hot streams shifted down by tmin/2, cold streams up by tmin/2."""
        for stream in self.streams:
            if stream['type'] == 'HOT':
                stream['ss'] = stream['ts'] - self.tmin / 2
                stream['st'] = stream['tt'] - self.tmin / 2
            else:
                stream['ss'] = stream['ts'] + self.tmin / 2
                stream['st'] = stream['tt'] + self.tmin / 2

        if self._options['debug'] == True:
            print("\nStreams: ")
            for stream in self.streams:
                print(stream)
            print("Tmin = {}".format(self.tmin))


    def constructTemperatureInterval(self):
        """Build the descending list of shifted temperature levels and record
        which streams cross each interval."""
        # Take all shifted temperatures and reverse sort them,
        # removing all duplicates
        for stream in self.streams:
            self._temperatures.append(stream['ss'])
            self._temperatures.append(stream['st'])

            if (stream["type"] == "HOT"):
                self._temperaturesHOT.append(stream['ss'])
                self._temperaturesHOT.append(stream['st'])

            else:
                self._temperaturesCOLD.append(stream['ss'])
                self._temperaturesCOLD.append(stream['st'])


        self._temperaturesHOT = list(set(self._temperaturesHOT))
        self._temperaturesHOT.sort()
        self._temperaturesCOLD = list(set(self._temperaturesCOLD))
        self._temperaturesCOLD.sort()
        self._temperatures = list(set(self._temperatures))
        self._temperatures.sort(reverse = True)

        # Save the stream number of all the streams that pass
        # through each shifted temperature interval
        for i in range(len(self._temperatures) - 1):
            t1 = self._temperatures[i]
            t2 = self._temperatures[i + 1]
            interval = {'t1': t1, 't2': t2, 'streamNumbers': []}


            j = 0
            for stream in self.streams:
                # Hot streams run from ss down to st; cold streams the reverse.
                if (stream['type'] == 'HOT'):
                    if (stream['ss'] >= t1 and stream['st'] <= t2):
                        interval['streamNumbers'].append(j)
                else:
                    if (stream['st'] >= t1 and stream['ss'] <= t2):
                        interval['streamNumbers'].append(j)
                j = j + 1

            self.temperatureInterval.append(interval)



        if self._options['debug'] == True:
            print("\nTemperature Intervals: ")
            i = 0
            print(self._temperatures)
            for interval in self.temperatureInterval:
                print("Interval {} : {}".format(i, interval))
                i = i + 1

        if self._options['draw'] == True:
            PinchPlot().drawTemperatureInterval(self._temperatures, self.streams)



    def constructProblemTable(self):
        """Compute deltaS, net deltaCP and deltaH for every temperature
        interval (hot streams add CP, cold streams subtract it)."""

        for interval in self.temperatureInterval:
            row = {}
            row['deltaS'] = interval['t1'] - interval['t2']
            row['deltaCP'] = 0

            for i in interval['streamNumbers']:
                # NOTE(review): the else branch is unreachable -- when
                # streamNumbers is empty this loop body never runs.
                if interval['streamNumbers'] != []:
                    if self.streams.streamsData[i]['type'] == 'HOT':
                        row['deltaCP'] = row['deltaCP'] + self.streams.streamsData[i]['cp']
                    else:
                        row['deltaCP'] = row['deltaCP'] - self.streams.streamsData[i]['cp']
                else:
                    row['deltaCP'] = 0

            row['deltaH'] = row['deltaS'] * row['deltaCP']
            self.problemTable.append(row)

        if self._options['debug'] == True:
            print("\nProblem Table: ")
            i = 0
            for interval in self.problemTable:
                print("Interval {} : {}".format(i, interval))
                i = i + 1

        if self._options['draw'] == True:
            PinchPlot().drawProblemTable(self.problemTable, self._temperatures)

        if self._options['csv'] == True:
            PinchExport().csvProblemTable(self.problemTable, self._temperatures, self.newpath)


    def constructHeatCascade(self):
        """Run the unfeasible then feasible heat cascades; sets
        ``hotUtility``, ``coldUtility`` and ``pinchTemperature``."""

        exitH = 0
        lowestExitH = 0

        i = 0
        pinchInterval = 0
        # First pass with zero hot utility: the most negative exit enthalpy
        # marks the pinch interval.
        for interval in self.problemTable:
            row = {}
            row['deltaH'] = interval['deltaH']

            exitH = exitH + row['deltaH']
            row['exitH'] = exitH
            if exitH < lowestExitH:
                lowestExitH = exitH
                pinchInterval = i

            self.unfeasibleHeatCascade.append(row)
            i = i + 1

        # Minimum hot utility removes the largest deficit.
        self.hotUtility = -lowestExitH
        exitH = self.hotUtility

        # Second (feasible) pass starting from the hot utility.
        for interval in self.problemTable:
            row = {}
            row['deltaH'] = interval['deltaH']

            exitH = exitH + row['deltaH']
            row['exitH'] = exitH

            self.heatCascade.append(row)

        self.coldUtility = exitH
        # NOTE(review): both branches are identical; this if/else can be
        # collapsed to a single assignment.
        if pinchInterval == 0:
            self.pinchTemperature = self.temperatureInterval[pinchInterval]['t2']
        else:
            self.pinchTemperature = self.temperatureInterval[pinchInterval]['t2']

        if self._options['debug'] == True:
            print("\nUnfeasible Heat Cascade: ")
            i = 0
            for interval in self.unfeasibleHeatCascade:
                print("Interval {} : {}".format(i, interval))
                i = i + 1

            print("\nFeasible Heat Cascade: ")
            i = 0
            for interval in self.heatCascade:
                print("Interval {} : {}".format(i, interval))
                i = i + 1

            print("\nPinch Temperature (degC): {}".format(self.pinchTemperature))
            print("Minimum Hot Utility (kW): {}".format(self.hotUtility))
            print("Minimum Cold Utility (kW): {}".format(self.coldUtility))

        if self._options['draw'] == True:
            PinchPlot().drawHeatCascade(self.unfeasibleHeatCascade, self.heatCascade, self.hotUtility)

        if self._options['csv'] == True:
            PinchExport().csvHeatCascade(self.unfeasibleHeatCascade, self.hotUtility, self.heatCascade, self.pinchTemperature, self.newpath)


    def constructShiftedCompositeDiagram(self, localisation):
        """Build the shifted hot/cold composite curves (H, T point lists);
        ``localisation`` is forwarded to the plot for label language."""
        emptylist = []
        for interval in self.temperatureInterval:
            hotH = 0
            coldH = 0
            # Add CP values for the hot and cold streams
            # in a given temperature interval
            for i in interval['streamNumbers']:
                # NOTE(review): the else branch is unreachable (loop does not
                # run for an empty streamNumbers list).
                if interval['streamNumbers'] != []:
                    if self.streams.streamsData[i]['type'] == 'HOT':
                        hotH = hotH + self.streams.streamsData[i]['cp']
                    else:
                        coldH = coldH + self.streams.streamsData[i]['cp']
                else:
                    hotH = 0
                    emptylist.append(i)
            # Enthalpy = CP * deltaT
            # check whether the inspected interval contains a hot stream and
            # only then start; then keep checking whether another hot stream
            # follows afterwards

            hotH = hotH * (interval['t1'] - interval['t2'])
            self._deltaHHot.append(hotH)



            coldH = coldH * (interval['t1'] - interval['t2'])
            self._deltaHCold.append(coldH)


        # red (hot) curve starts at H=0 / t1
        # blue (cold) curve starts at H=coldUtility / t2
        self.shiftedCompositeDiagram['hot']['T']= []

        self._deltaHHot.reverse()
        self.shiftedCompositeDiagram['hot']['H'].append(0.0)
        for i in range(1, len(self._temperatures)):
            self.shiftedCompositeDiagram['hot']['H'].append(self.shiftedCompositeDiagram['hot']['H'][-1] + self._deltaHHot[i-1])
            self.shiftedCompositeDiagram['hot']['T'].append(self._temperatures[len(self._temperatures)-i])


        self.shiftedCompositeDiagram['hot']['T'].append(self._temperatures[0])
        # total = sum of all deltaHCold + coldUtility; then subtract each
        # deltaHCold step by step going down the temperature levels
        coldgesamt = self.coldUtility
        for i in range(len(self._deltaHCold)):
            coldgesamt += self._deltaHCold[i]

        # experimental
        self.shiftedCompositeDiagram['cold']['T']= []

        self.shiftedCompositeDiagram['cold']['H'].append(coldgesamt)

        # experimental
        self.shiftedCompositeDiagram['cold']['T'].append(self._temperatures[0])
        for i in range(1, len(self._temperatures)):
            self.shiftedCompositeDiagram['cold']['H'].append(self.shiftedCompositeDiagram['cold']['H'][-1] - self._deltaHCold[i-1])
            self.shiftedCompositeDiagram['cold']['T'].append(self._temperatures[i])


        # Remove redundant plateau points from the cold curve.
        iliste = []
        for i in range(1,(len(self.shiftedCompositeDiagram['cold']['H'])-1)):
            # NOTE(review): this compares the whole list to 0.0 and is always
            # False -- probably meant ['H'][i] == 0.0 like the hot-curve loop
            # below. Harmless today: the H values are non-increasing, so a
            # zero entry can only equal ['H'][-1], which the last elif handles.
            if self.shiftedCompositeDiagram['cold']['H'] == 0.0:
                iliste.append(i+1)
            elif self.shiftedCompositeDiagram['cold']['H'][i] == self.shiftedCompositeDiagram['cold']['H'][0]:
                iliste.append(i-1)
            elif self.shiftedCompositeDiagram['cold']['H'][i] == self.shiftedCompositeDiagram['cold']['H'][-1]:
                iliste.append(i+1)
        iliste.reverse()

        # Pop from the back so earlier indices stay valid.
        for i in iliste:
            self.shiftedCompositeDiagram['cold']['H'].pop(i)
            self.shiftedCompositeDiagram['cold']['T'].pop(i)
        # Remove redundant plateau points from the hot curve.
        iliste = []
        for i in range(1,(len(self.shiftedCompositeDiagram['hot']['H'])-1)):
            if self.shiftedCompositeDiagram['hot']['H'][i] == 0.0:
                iliste.append(i-1)
            elif self.shiftedCompositeDiagram['hot']['H'][i] == self.shiftedCompositeDiagram['hot']['H'][-1]:
                iliste.append(i+1)
        iliste.reverse()

        for i in iliste:
            self.shiftedCompositeDiagram['hot']['H'].pop(i)
            self.shiftedCompositeDiagram['hot']['T'].pop(i)

        if self._options['draw'] == True:
            PinchPlot().drawShiftedCompositeDiagram(self.shiftedCompositeDiagram, self.coldUtility,
                                                    self._temperatures, self.hotUtility, self.pinchTemperature,
                                                    self.processdesignation, localisation)

        if self._options['csv'] == True:
            PinchExport().csvShiftedCompositeDiagram(self.newpath, self.shiftedCompositeDiagram)



    def constructCompositeDiagram(self, localisation):
        """Un-shift the composite curves: hot up by tmin/2, cold down by
        tmin/2 (enthalpy lists are shared with the shifted diagram)."""
        self.compositeDiagram['hot']['T'] = [x + self.tmin / 2 for x in self.shiftedCompositeDiagram['hot']['T']]
        self.compositeDiagram['hot']['H'] = self.shiftedCompositeDiagram['hot']['H']
        self.compositeDiagram['cold']['T'] = [x - self.tmin / 2 for x in self.shiftedCompositeDiagram['cold']['T']]
        self.compositeDiagram['cold']['H'] = self.shiftedCompositeDiagram['cold']['H']
        # NOTE(review): unconditional debug print; consider guarding with
        # self._options['debug'].
        print(self._temperatures)
        if self._options['draw'] == True:
            PinchPlot().drawCompositeDiagram(self.compositeDiagram, self.shiftedCompositeDiagram,
                                             self.coldUtility, self._temperatures, self.tmin, self.hotUtility,
                                             self.pinchTemperature, self.processdesignation, localisation)

        if self._options['csv'] == True:
            PinchExport().csvCompositeDiagram(self.newpath, self.compositeDiagram)



    def constructGrandCompositeCurve(self,localisation):
        """Grand composite curve: net heat flow (hot utility, then cascade
        exit enthalpies) against the shifted temperature levels."""
        self.grandCompositeCurve['H'].append(self.hotUtility)
        self.grandCompositeCurve['T'].append(self._temperatures[0])
        for i in range(1, len(self._temperatures)):
            self.grandCompositeCurve['H'].append(self.heatCascade[i - 1]['exitH'])
            self.grandCompositeCurve['T'].append(self._temperatures[i])
        # NOTE(review): unconditional debug prints; consider guarding with
        # self._options['debug'].
        print(self.grandCompositeCurve)
        print(self.heatCascade)

        if self._options['debug'] == True:
            print("\nGrand Composite Curve: ")
            print("Net H (kW): {}".format(self.grandCompositeCurve['H']))
            print("T (degC): {}".format(self.grandCompositeCurve['T']))


        if self._options['draw'] == True:
            PinchPlot().drawGrandCompositeCurve(self.processdesignation, self.heatCascade,
                                                self.grandCompositeCurve, self._temperatures, self.pinchTemperature, localisation)

        if self._options['csv'] == True:
            PinchExport().csvGrandCompositeCurve(self.newpath, self.grandCompositeCurve)
366
+
pinch_tool/Modules/Pinch/PinchExport.py ADDED
@@ -0,0 +1,129 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import csv
3
+
4
class PinchExport:
    """CSV export helpers for the pinch-analysis results.

    Every method writes one CSV file into the directory ``newpath``,
    creating the directory first if necessary.
    """

    def __init__(self):
        pass

    @staticmethod
    def _prepare(newpath, fileName):
        """Ensure ``newpath`` exists and return the full path for ``fileName``.

        Uses exist_ok=True instead of the check-then-create pattern, which
        avoids a race if two exporters run concurrently.
        """
        os.makedirs(newpath, exist_ok=True)
        return os.path.join(newpath, fileName)

    def csvProblemTable(self, problemTable, _temperatures, newpath):
        """Write ProblemTable.csv: one row per temperature interval with
        deltaT, net deltaCP, deltaH and a Surplus/Deficit classification."""
        cellText = []
        i = 1
        for interval in problemTable:
            cellRow = []
            cellRow.extend(['{}: {} - {}'.format(i, _temperatures[i - 1], _temperatures[i]),
                            interval['deltaS'], interval['deltaCP'], interval['deltaH']])

            # Classify the interval's net enthalpy balance.
            if interval['deltaH'] > 0:
                cellRow.append('Surplus')
            elif interval['deltaH'] == 0:
                cellRow.append('-')
            else:
                cellRow.append('Deficit')
            cellText.append(cellRow)
            i = i + 1

        path = self._prepare(newpath, 'ProblemTable.csv')
        with open(path, 'w', newline='') as f:
            writer = csv.writer(f, delimiter=',')
            writer.writerow(['Interval: S1 - S2', 'delta T (degC)', 'delta CP (kW / degC)', 'delta H (kW)', ''])
            for rowText in cellText:
                writer.writerow(rowText)

    def csvHeatCascade(self, unfeasibleHeatCascade, hotUtility, heatCascade, pinchTemperature, newpath):
        """Write HeatCascade.csv: the unfeasible cascade followed by the
        feasible one, with utilities and the pinch temperature."""
        cellText = [['Unfeasible Heat Cascade: ']]
        cellText.append(['', '', 'Hot Utility: 0 kW'])
        cellText.append(['Interval', 'Delta H (kW)', 'Exit H (total kW)'])

        i = 1
        for interval in unfeasibleHeatCascade:
            cellText.append([str(i), interval['deltaH'], interval['exitH']])
            i = i + 1

        cellText.append(['', '', 'Cold Utility: {} kW'.format(unfeasibleHeatCascade[-1]['exitH'])])
        cellText.append([''])

        cellText.append(['Feasible Heat Cascade: '])
        cellText.append(['', '', 'Hot Utility: {} kW'.format(hotUtility)])
        cellText.append(['Interval', 'Delta H (kW)', 'Exit H (total kW)'])

        i = 1
        for interval in heatCascade:
            cellText.append([str(i), interval['deltaH'], interval['exitH']])
            i = i + 1

        cellText.append(['', '', 'Cold Utility: {} kW'.format(heatCascade[-1]['exitH'])])
        cellText.append(['','', 'Pinch Temperature: {} degC'.format(pinchTemperature)])

        path = self._prepare(newpath, 'HeatCascade.csv')
        with open(path, 'w', newline='') as f:
            writer = csv.writer(f, delimiter=',')
            for rowText in cellText:
                writer.writerow(rowText)

    def csvShiftedCompositeDiagram(self, newpath, shiftedCompositeDiagram):
        """Write ShiftedCompositeDiagram.csv: hot (H, T) points, a blank
        separator row, then cold (H, T) points."""
        path = self._prepare(newpath, 'ShiftedCompositeDiagram.csv')
        with open(path, 'w', newline='') as f:
            writer = csv.writer(f, delimiter=',')
            writer.writerow(['Hot H', 'Hot T'])
            for i in range(0, len(shiftedCompositeDiagram['hot']['H'])):
                writer.writerow([shiftedCompositeDiagram['hot']['H'][i],
                                 shiftedCompositeDiagram['hot']['T'][i]])

            writer.writerow([''])
            writer.writerow(['Cold H', 'Cold T'])
            for i in range(0, len(shiftedCompositeDiagram['cold']['H'])):
                writer.writerow([shiftedCompositeDiagram['cold']['H'][i],
                                 shiftedCompositeDiagram['cold']['T'][i]])

    def csvCompositeDiagram(self, newpath, compositeDiagram):
        """Write CompositeDiagram.csv: hot (H, T) points, a blank separator
        row, then cold (H, T) points."""
        path = self._prepare(newpath, 'CompositeDiagram.csv')
        with open(path, 'w', newline='') as f:
            writer = csv.writer(f, delimiter=',')
            writer.writerow(['Hot H', 'Hot T'])
            for i in range(0, len(compositeDiagram['hot']['H'])):
                writer.writerow([compositeDiagram['hot']['H'][i],
                                 compositeDiagram['hot']['T'][i]])

            writer.writerow([''])
            writer.writerow(['Cold H', 'Cold T'])
            for i in range(0, len(compositeDiagram['cold']['H'])):
                writer.writerow([compositeDiagram['cold']['H'][i],
                                 compositeDiagram['cold']['T'][i]])

    def csvGrandCompositeCurve(self, newpath, grandCompositeCurve):
        """Write GrandCompositeCurve.csv: net heat flow against shifted
        temperature, one (H, T) pair per row."""
        path = self._prepare(newpath, 'GrandCompositeCurve.csv')
        with open(path, 'w', newline='') as f:
            writer = csv.writer(f, delimiter=',')
            writer.writerow(['Net H (kW)', 'T(degC)'])
            for i in range(0, len(grandCompositeCurve['H'])):
                writer.writerow([grandCompositeCurve['H'][i],
                                 grandCompositeCurve['T'][i]])
pinch_tool/Modules/Pinch/PinchPlot.py ADDED
@@ -0,0 +1,193 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import matplotlib.pyplot as plt
2
+
3
+ class PinchPlot:
4
+
5
def drawTemperatureInterval(self, _temperatures, streams):
    """Draw the shifted temperature interval diagram: one dotted horizontal
    line per shifted temperature level and one vertical arrow per stream
    (red = hot, blue = cold) from its shifted supply to target temperature.
    """
    fig, ax = plt.subplots()

    plt.title('Shifted Temperature Interval Diagram')
    plt.ylabel('Shifted Temperature S (degC)')
    ax.set_xticklabels([])  # x positions are only layout, not data

    xOffset = 50  # horizontal spacing between stream arrows

    # Dotted guide line at every shifted temperature level.
    for temperature in _temperatures:
        plt.plot([0, xOffset * (streams.numberOf + 1)], [temperature, temperature], ':k', alpha=0.8)

    # Arrow geometry scaled to the stream count and the top temperature so
    # the plot stays readable for different problem sizes.
    arrow_width = streams.numberOf * 0.05
    head_width = arrow_width * 15
    head_length = _temperatures[0] * 0.02
    i = 1
    for stream in streams:
        if stream['type'] == 'HOT':
            # Hot stream: red arrow from shifted supply (ss) to target (st).
            plt.text(xOffset, stream['ss'], str(i), bbox=dict(boxstyle='round', alpha=1, fc='tab:red', ec="k"))
            plt.arrow(xOffset, stream['ss'], 0, stream['st'] - stream['ss'], color='tab:red', ec='k', alpha=1,
                      length_includes_head=True, width=arrow_width, head_width=head_width, head_length=head_length)
        else:
            # Cold stream: blue arrow.
            plt.text(xOffset, stream['ss'], str(i), bbox=dict(boxstyle='round', alpha=1, fc='tab:blue', ec="k"))
            plt.arrow(xOffset, stream['ss'], 0, stream['st'] - stream['ss'], color='tab:blue', ec='k', alpha=1,
                      length_includes_head=True, width=arrow_width, head_width=head_width, head_length=head_length)
        xOffset = xOffset + 50
        i = i + 1
32
+
33
def drawProblemTable(self, problemTable, _temperatures):
    """Render the problem table as a matplotlib table figure.

    One row per temperature interval showing deltaT, net deltaCP, deltaH
    and whether the interval has a heat Surplus or Deficit.
    """
    fig, ax = plt.subplots(figsize=(6, 6))
    ax.axis('tight')
    ax.axis('off')
    ax.set_title('Problem Table')

    colLabels = ['$Interval: S_i - S_{i+1}$', '$\\Delta T (\\degree C)$', '$\\Delta CP (kW / \\degree C)$', '$\\Delta H (kW)$', '']

    rows = []
    for idx, interval in enumerate(problemTable, start=1):
        # Classify the interval's net enthalpy balance.
        if interval['deltaH'] > 0:
            balance = 'Surplus'
        elif interval['deltaH'] < 0:
            balance = 'Deficit'
        else:
            balance = '-'
        rows.append(['{}: {} - {}'.format(idx, _temperatures[idx - 1], _temperatures[idx]),
                     interval['deltaS'], interval['deltaCP'], interval['deltaH'], balance])

    table = ax.table(cellText=rows, colLabels=colLabels, loc='center')
    table.auto_set_column_width([0, 1, 2, 3, 4])
    table.scale(1.3, 1.3)
60
+
61
+
62
def drawHeatCascade(self, unfeasibleHeatCascade, heatCascade, hotUtility):
    """Draw the unfeasible and feasible heat cascades side by side.

    Each cascade is rendered as a table: a header line with the hot
    utility (always 0 for the unfeasible cascade), one row per interval
    with deltaH and cumulative exit H, and a footer with the cold utility
    (the exit H of the last interval).
    """
    fig, axs = plt.subplots(1, 2, figsize=(10, 6))
    for ax in axs:
        ax.axis('auto')
        ax.axis('off')

    axs[0].set_title('Unfeasible Heat Cascade')
    axs[1].set_title('Feasible Heat Cascade')

    # By definition the unfeasible cascade assumes zero hot utility.
    self._drawCascadeTable(axs[0], unfeasibleHeatCascade, 0)
    self._drawCascadeTable(axs[1], heatCascade, hotUtility)

def _drawCascadeTable(self, ax, cascade, hotUtility):
    """Render one heat cascade on *ax* (helper for drawHeatCascade)."""
    cellText = [['', '', 'Hot Utility: {}'.format(hotUtility)],
                ['Interval', '$\\Delta H (kW)$', 'Exit H (total kW)']]
    for i, interval in enumerate(cascade, start=1):
        cellText.append([str(i), interval['deltaH'], interval['exitH']])
    # The cold utility equals the heat leaving the bottom interval.
    cellText.append(['', '', 'Cold Utility: {}'.format(cascade[-1]['exitH'])])

    table = ax.table(cellText=cellText, loc='center')
    table.auto_set_column_width([0, 1, 2])
    table.scale(1.3, 1.3)
99
+
100
+
101
def drawShiftedCompositeDiagram(self, shiftedCompositeDiagram, coldUtility,
                                _temperatures, hotUtility, pinchTemperature,
                                processdesignation, localisation):
    """Plot the shifted composite curves (hot in red, cold in blue).

    A dotted vertical line marks the pinch when the pinch temperature is a
    vertex of the shifted cold curve. A white, hatched band is drawn over
    the enthalpy span between the cold utility and the cold curve's first
    point minus the hot utility. Title/labels follow *localisation*
    ('DE' or 'EN'); any other value leaves the axes unlabeled.
    """
    fig = plt.figure()
    plt.plot(shiftedCompositeDiagram['hot']['H'], shiftedCompositeDiagram['hot']['T'], 'tab:red')
    plt.plot(shiftedCompositeDiagram['cold']['H'], shiftedCompositeDiagram['cold']['T'], 'tab:blue')

    # Vertex markers on top of both curves.
    plt.plot(shiftedCompositeDiagram['hot']['H'], shiftedCompositeDiagram['hot']['T'], 'ro')
    plt.plot(shiftedCompositeDiagram['cold']['H'], shiftedCompositeDiagram['cold']['T'], 'bo')

    try:
        # Pinch line only exists when the pinch temperature is an actual
        # vertex of the shifted cold curve.
        pinchIndex = shiftedCompositeDiagram['cold']['T'].index(pinchTemperature)
        pinchH = shiftedCompositeDiagram['cold']['H'][pinchIndex]
        plt.plot([pinchH, pinchH], [_temperatures[0], _temperatures[-1]], ':')
    except ValueError:
        pass  # pinch not on the curve: skip the marker line

    a = plt.fill_between([coldUtility, shiftedCompositeDiagram['cold']['H'][0]-hotUtility], [shiftedCompositeDiagram['cold']['T'][0]])
    a.set_hatch('\\')
    a.set_facecolor('w')
    plt.grid(True)
    if localisation == 'DE':
        plt.title('Verschobene Verbundkurven ({})'.format(processdesignation))
        plt.xlabel('Enthalpiestrom H in kW')
        plt.ylabel('Verschobene Temperatur in °C')
    elif localisation == 'EN':
        plt.title('Shifted Composite Diagram')
        plt.xlabel('Enthalpy H in kW')
        plt.ylabel('Shifted Temperature T in °C')
130
+
131
def drawCompositeDiagram(self, compositeDiagram, shiftedCompositeDiagram, coldUtility,
                         _temperatures, tmin, hotUtility, pinchTemperature, processdesignation, localisation):
    """Plot the (unshifted) hot and cold composite curves.

    The pinch line position is looked up on the *shifted* composite
    diagram, since the pinch temperature is defined on the shifted scale.
    Title/labels follow *localisation* ('DE' or 'EN'). The coldUtility,
    tmin and hotUtility parameters are kept for interface compatibility
    but are not used by the current drawing logic.
    """
    fig = plt.figure()
    plt.plot(compositeDiagram['hot']['H'], compositeDiagram['hot']['T'], 'tab:red')
    plt.plot(compositeDiagram['cold']['H'], compositeDiagram['cold']['T'], 'tab:blue')

    # Vertex markers on top of both curves.
    plt.plot(compositeDiagram['hot']['H'], compositeDiagram['hot']['T'], 'ro')
    plt.plot(compositeDiagram['cold']['H'], compositeDiagram['cold']['T'], 'bo')

    try:
        pinchIndex = shiftedCompositeDiagram['cold']['T'].index(pinchTemperature)
        pinchH = shiftedCompositeDiagram['cold']['H'][pinchIndex]
        plt.plot([pinchH, pinchH], [_temperatures[0], _temperatures[-1]], ':')
    except ValueError:
        pass  # pinch temperature not a vertex of the shifted cold curve

    plt.grid(True)
    if localisation == 'DE':
        plt.title('Verbundkurven ({})'.format(processdesignation))
        plt.xlabel('Enthalpiestrom H in kW')
        plt.ylabel('Temperatur in °C')
    elif localisation == 'EN':
        plt.title('Composite Diagram ({})'.format(processdesignation))
        plt.xlabel('Enthalpy H in kW')
        plt.ylabel('Temperature T in °C')
158
+
159
+
160
def drawGrandCompositeCurve(self, processdesignation, heatCascade,
                            grandCompositeCurve, _temperatures,
                            pinchTemperature, localisation):
    """Plot the grand composite curve of one process.

    Segments whose interval deltaH is positive are drawn red; segments
    with negative deltaH — or zero deltaH at a nonzero enthalpy (interior
    segments only) — are drawn blue. A dotted horizontal line marks the
    pinch temperature. Title/labels follow *localisation* ('DE'/'EN').
    """
    fig = plt.figure(num='{}'.format(processdesignation))

    def _segment(idx, line_style, marker_style):
        # Draw one segment of the curve: line plus vertex markers.
        h_pair = [grandCompositeCurve['H'][idx], grandCompositeCurve['H'][idx + 1]]
        t_pair = [grandCompositeCurve['T'][idx], grandCompositeCurve['T'][idx + 1]]
        plt.plot(h_pair, t_pair, line_style)
        plt.plot(h_pair, t_pair, marker_style)

    # Topmost segment: only drawn for a strictly nonzero first deltaH.
    if heatCascade[0]['deltaH'] > 0:
        _segment(0, 'tab:red', 'ro')
    elif heatCascade[0]['deltaH'] < 0:
        _segment(0, 'tab:blue', 'bo')

    for i in range(1, len(_temperatures) - 1):
        delta_h = heatCascade[i]['deltaH']
        if delta_h > 0:
            _segment(i, 'tab:red', 'ro')
        elif delta_h < 0 or (delta_h == 0 and grandCompositeCurve['H'][i] != 0):
            _segment(i, 'tab:blue', 'bo')

    # Dotted pinch marker across the enthalpy range.
    plt.plot([0, grandCompositeCurve['H'][-1]],
             [pinchTemperature, pinchTemperature], ':')

    plt.grid(True)
    if localisation == 'DE':
        plt.title('Großverbundkurve ({})'.format(processdesignation))
        plt.xlabel('Nettoenthalpiestromänderung ∆H [kW]')
        plt.ylabel('Verschobene Temperatur [°C]')
    elif localisation == 'EN':
        plt.title('Grand Composite Diagram ({})'.format(processdesignation))
        plt.xlabel('Net Enthalpy Change ∆H in kW')
        plt.ylabel('Shifted Temperature T in °C')
191
+
192
def showPlots(self=None):
    """Display all queued matplotlib figures (blocking).

    NOTE(review): the original definition omitted ``self`` although it sits
    at method position inside a plotting class (the sibling
    ``TSPPlot.showPlots(self)`` takes it), so ``instance.showPlots()``
    raised a TypeError. ``self=None`` keeps both ``instance.showPlots()``
    and the original class-level ``PlotClass.showPlots()`` call working.
    """
    plt.show()
pinch_tool/Modules/Pinch/Streams.py ADDED
@@ -0,0 +1,105 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import csv
2
+
3
class Streams:
    """Process streams parsed from a CSV data file.

    Expected file layout::

        Tmin, <value>
        CP, TSUPPLY, TTARGET
        <cp>, <ts>, <tt>        (one row per stream, at least two rows)

    A stream whose supply temperature exceeds its target is classified as
    'HOT', otherwise 'COLD'. Iterating a Streams instance yields the parsed
    stream dicts ({'type', 'cp', 'ts', 'tt'}).

    Fixes vs. original: ``__iter__`` now returns an independent iterator
    (the original returned ``self`` with a shared cursor, which broke
    nested iteration over the same instance), and the bare ``except:``
    clauses were narrowed to ``except Exception:`` so that
    KeyboardInterrupt/SystemExit are not intercepted just to print a
    misleading message before re-raising.
    """

    def __init__(self, streamsDataFile):
        """Read and validate *streamsDataFile*, then parse the streams.

        Raises Exception on bad header rows; ValueError/IndexError
        (re-raised after a diagnostic print) on malformed data rows.
        """
        self.tmin = 0            # minimum approach temperature (degC)
        self.numberOf = 0        # number of parsed streams
        self.streamsData = []    # parsed stream dicts

        self._rawStreamsData = []  # raw CSV rows, kept for debugging
        self._index = 0            # cursor for the legacy __next__ protocol
        self._length = 0

        with open(streamsDataFile, newline='') as f:
            reader = csv.reader(f)
            for row in reader:
                self._rawStreamsData.append(row)

        # Validate the two header rows before attempting any parsing.
        if (self._rawStreamsData[0][0].strip() != 'Tmin' or
                [item.strip() for item in self._rawStreamsData[1]] != ['CP', 'TSUPPLY', 'TTARGET']):
            raise Exception("""\n[ERROR] Bad formatting in streams data file. \n
            The first two rows of the streams data file should be: \n
            `` Tmin, <TMIN VALUE> ''
            `` CP, TSUPPLY, TTARGET ''\n
            Where CP is the heat capacity (kW / degC);
            TSUPPLY is the starting temperature of the given stream (degC);
            TTARGET is the ending temperature of the given stream (degC);\n""")

        self.createStreams()

    def createStreams(self):
        """Parse Tmin and every data row of the raw CSV into streamsData."""
        try:
            self.tmin = float(self._rawStreamsData[0][1])
        except ValueError:
            print("\n[ERROR] Wrong type supplied for Tmin in the streams data file. Perhaps used characters?\n")
            raise
        except IndexError:
            print("\n[ERROR] Missing value for Tmin in the streams data file.\n")
            raise
        except Exception:
            print("\n[ERROR] Unexpected error for Tmin. Try using the supplied streams data file format.\n")
            raise

        for rawStream in self._rawStreamsData[2:]:
            try:
                stream = {}

                # Supply above target => the stream releases heat (HOT).
                if float(rawStream[1]) > float(rawStream[2]):
                    stream["type"] = "HOT"
                else:
                    stream["type"] = "COLD"

                stream["cp"] = float(rawStream[0])
                stream["ts"] = float(rawStream[1])
                stream["tt"] = float(rawStream[2])

                self.streamsData.append(stream)

            except ValueError:
                print("\n[ERROR] Wrong number type supplied in the streams data file. Perhaps used characters?\n")
                raise
            except IndexError:
                print("\n[ERROR] Missing number in the streams data file.\n")
                raise
            except Exception:
                print("\n[ERROR] Unexpected error. Try using the supplied streams data file format.\n")
                raise

        self._length = len(self.streamsData)
        self.numberOf = len(self.streamsData)
        if (self._length < 2):
            raise Exception("\n[ERROR] Need to supply at least 2 streams in the streams data file.\n")

    def __iter__(self):
        # Independent iterator per `for` loop, so nested/concurrent
        # iteration over the same instance does not share cursor state.
        return iter(self.streamsData)

    def __next__(self):
        # Legacy cursor protocol kept for callers invoking next(streams)
        # directly; wraps around after exhaustion, as before.
        if self._index == self._length:
            self._index = 0
            raise StopIteration
        self._index = self._index + 1

        return self.streamsData[self._index - 1]

    def printTmin(self):
        """Print the minimum approach temperature."""
        print(self.tmin)

    def printStreams(self):
        """Print every parsed stream dict, one per line."""
        for stream in self.streamsData:
            print(stream)

    def printRawStreams(self):
        """Print every raw CSV row as read from the file."""
        for rawStream in self._rawStreamsData:
            print(rawStream)
pinch_tool/Modules/TotalSiteProfile/TSPPlot.py ADDED
@@ -0,0 +1,96 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import matplotlib.pyplot as plt
2
+
3
class TSPPlot():
    """Matplotlib helpers for plotting total-site-profile results."""

    def drawDeletedCurve(self, heatCascadedeltaH, deletedPocketdict, _temperatures, plottest):
        """Plot the grand composite curve after temperature-pocket deletion.

        Segments with positive interval deltaH are drawn red (net heat
        surplus), all others blue. *plottest* == 1 drops the last interval
        from the plot (used when the cascade already ends at zero exit H);
        *plottest* == 0 plots the full range; other values plot only the
        first segment, as in the original implementation.
        """
        fig = plt.figure()

        def _segment(idx, line_colour, marker):
            # One curve segment: connecting line plus vertex markers.
            h_pair = [deletedPocketdict['H'][0][idx], deletedPocketdict['H'][0][idx + 1]]
            t_pair = [deletedPocketdict['T'][0][idx], deletedPocketdict['T'][0][idx + 1]]
            plt.plot(h_pair, t_pair, line_colour)
            plt.plot(h_pair, t_pair, marker)

        # Topmost segment: only drawn for strictly nonzero first deltaH.
        if heatCascadedeltaH[0] > 0:
            _segment(0, 'tab:red', 'ro')
        elif heatCascadedeltaH[0] < 0:
            _segment(0, 'tab:blue', 'bo')

        if plottest == 1:
            upper = len(_temperatures) - 2
        elif plottest == 0:
            upper = len(_temperatures) - 1
        else:
            upper = 1  # other plottest values: no interior segments (as before)

        for i in range(1, upper):
            if heatCascadedeltaH[i] > 0:
                _segment(i, 'tab:red', 'ro')
            else:
                # Negative AND zero deltaH were both drawn blue originally.
                _segment(i, 'tab:blue', 'bo')

        plt.grid(True)
        plt.title('Grand Composite Curve')
        plt.xlabel('Net Enthalpy Change ∆H [kW]')
        plt.ylabel('Shifted Temperature T [°C]')

    def drawTotalSiteProfile(self, siteDesignation, tstHotH, tstHotTemperatures, tstColdH, tstColdTemperatures, localisation):
        """Draw the site source (hot, red) and sink (cold, blue) profiles
        side by side with a shared temperature range.

        Fix vs. original: the x-limits are only set when both profiles are
        non-empty; the original indexed ``tstColdH[-1]`` / ``tstHotH[0]``
        before its own emptiness guard and raised IndexError on empty input.
        """
        fig, (ax1, ax2) = plt.subplots(1, 2)
        fig.suptitle('Total Site Profile ({})'.format(siteDesignation))

        ax1.plot(tstHotH, tstHotTemperatures, 'tab:red')
        if localisation == 'DE':
            ax1.set(xlabel='Nettoenthalpieänderung ∆H in kW', ylabel='Verschobene Temperatur T in °C')
        elif localisation == 'EN':
            ax1.set(xlabel='Net Enthalpy Change ∆H in kW', ylabel='Shifted Temperature T in °C')
        ax1.grid()

        ax2.plot(tstColdH, tstColdTemperatures, 'tab:blue')
        if localisation == 'DE':
            ax2.set_xlabel('Nettoenthalpieänderung ∆H in kW')
        elif localisation == 'EN':
            ax2.set(xlabel='Net Enthalpy Change ∆H in kW')
        ax2.grid()

        if tstHotH == [] or tstColdH == []:
            return  # nothing to scale against; keep matplotlib autoscaling

        ax2.set_xlim([0, tstColdH[-1]])
        ax1.set_xlim([tstHotH[0], 0])

        # Shared y-range spanning the lowest/highest endpoint temperature
        # over both profiles, with a 2.5 degC margin. Equal endpoint
        # temperatures deliberately leave y autoscaling in place (as before).
        if tstHotTemperatures[-1] > tstColdTemperatures[-1]:
            if tstHotTemperatures[0] > tstColdTemperatures[0]:
                lo, hi = tstColdTemperatures[0], tstHotTemperatures[-1]
            elif tstHotTemperatures[0] < tstColdTemperatures[0]:
                lo, hi = tstHotTemperatures[0], tstHotTemperatures[-1]
            else:
                return
        elif tstHotTemperatures[-1] < tstColdTemperatures[-1]:
            if tstHotTemperatures[0] > tstColdTemperatures[0]:
                lo, hi = tstColdTemperatures[0], tstColdTemperatures[-1]
            elif tstHotTemperatures[0] < tstColdTemperatures[0]:
                lo, hi = tstHotTemperatures[0], tstColdTemperatures[-1]
            else:
                return
        else:
            return
        ax1.set_ylim([lo - 2.5, hi + 2.5])
        ax2.set_ylim([lo - 2.5, hi + 2.5])

    def showPlots(self):
        """Display all queued figures (blocking)."""
        plt.show()
pinch_tool/Modules/TotalSiteProfile/TotalSiteProfile.py ADDED
@@ -0,0 +1,222 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from Pinch_main import Pinchmain as Pinch
2
+ import csv
3
+ import ast
4
+ from Modules.TotalSiteProfile.TSPPlot import TSPPlot
5
+ from Modules.Utility.TemperaturePocketDeletion import TemperaturePocketDeletion as TPD
6
+ from Modules.Utility.splitStreams import splitStreams
7
+
8
class TotalSiteProfile:
    """Builds a total site profile (site source/sink curves) from the pinch
    analyses of several processes.

    Typical workflow (as shown by the method bodies below):
    ``importData()`` per process, then ``deleteTemperaturePockets()`` or
    ``noDeletionHelper()``, then ``splitHotandCold()``, and finally
    ``constructTotalSiteProfile()``.
    """

    def __init__(self, siteDesignation, options = {}):
        # NOTE(review): mutable default argument ``options = {}``; harmless
        # here because it is only read, but ``options=None`` would be safer.

        self.siteDesignation = siteDesignation  # label used in plot titles

        self.heatCascadedeltaH = []
        self.heatCascadeexitH = []

        self.deletedPocketdict = []
        self.heatCascadedeltaHdict = []
        self.heatCascadeexitHdict = []
        self.deletedPocketdictlist = []

        # Per-process hot/cold curve data after pocket deletion/splitting.
        self.splitHotTemperatures = []
        self.splitColdTemperatures = []
        self.splitHotH = []
        self.splitColdH = []
        self.splitHotdeltaH = []
        self.splitColddeltaH = []
        # 'Steigung' (German) = slope: slopes of the H(T) segments per process.
        self.splitdict = {'HotTemperatures': [], 'ColdTemperatures': [], 'HotH': [], 'ColdH': [], 'HotdeltaH': [], 'ColddeltaH': [], 'SteigungHot': [], 'SteigungCold': []}

        # Records which process/stream contributed each unique temperature.
        self.tstHotConstructionAid = {'T': [], 'Process': [], 'Stream': []}
        self.tstColdConstructionAid = {'T': [], 'Process': [], 'Stream': []}

        # Final total-site-profile axis data.
        self.tstHotTemperatures = []
        self.tstColdTemperatures = []
        self.tstHotH = []
        self.tstColdH = []

        self.emptyintervalsHot = []
        self.emptyintervalsCold = []

        # Feature flags; the mere presence of a key in *options* enables it.
        self._options = {'debug': False, 'draw': False, 'csv': False}

        if 'debug' in options:
            self._options['debug'] = True
        if 'draw' in options:
            self._options['draw'] = True
        if 'csv' in options:
            self._options['csv'] = True


    def importData(self, siteProfilecsv):
        """Run a pinch analysis on *siteProfilecsv* and read its results
        (temperatures, heat cascade, hot utility) from the buffer CSV that
        the pinch solver writes as a side effect.
        """
        nooption = {}
        Pinch('{}'.format(siteProfilecsv),nooption).solvePinch()
        with open('Buffer file for TotalSiteProfile creation.csv', newline='') as f:
            reader = csv.reader(f)
            csvdata=[]
            for row in reader:
                csvdata.append(row)
        # Row 0: shifted temperatures; row 1: heat cascade dicts (as
        # literals); row 2: hot utility.
        self._temperatures = csvdata[0]
        self.heatCascade = csvdata[1]
        for i in range(len(self.heatCascade)):
            self.heatCascade[i] = ast.literal_eval(self.heatCascade[i])
        self.hotUtility = float(csvdata[2][0])
        for i in range(len(self._temperatures)):
            self._temperatures[i] = float(self._temperatures[i])

    def deleteTemperaturePockets(self):
        """Delegate pocket deletion of the imported cascade to the
        TemperaturePocketDeletion utility and keep its result."""
        self.deletedPocketdict = TPD(self.hotUtility, self.heatCascade, self._temperatures).deleteTemperaturePockets()


    def noDeletionHelper(self):
        """Build deletedPocketdict directly from the imported cascade
        (same shape as deleteTemperaturePockets(), but without removing
        any pockets)."""
        self.deletedPocketdict = {'H': [], 'deltaH': [], 'T': []}
        self.heatCascadedeltaH = []
        self.heatCascadeexitH = []
        # exitH carries one extra leading entry: the hot utility.
        self.heatCascadeexitH.append(self.hotUtility)

        for o in range(len(self.heatCascade)):
            self.heatCascadedeltaH.append(self.heatCascade[o]['deltaH'])
            self.heatCascadeexitH.append(self.heatCascade[o]['exitH'])

        self.deletedPocketdict['H'].append(self.heatCascadeexitH)
        self.deletedPocketdict['deltaH'].append(self.heatCascadedeltaH)
        self.deletedPocketdict['T'].append(self._temperatures)
        self.deletedPocketdictlist.append(self.deletedPocketdict)

    def splitHotandCold(self):
        # TODO (from original, translated): implement the case of two
        # consecutive hot streams without deletion.
        self.splitdict = splitStreams(self.deletedPocketdict, self.splitdict).splitHotandCold()

    def constructTotalSiteProfile(self, localisation):
        """Merge the per-process split curves into the site-wide hot and
        cold profiles and (optionally) draw them.

        Original approach (translated from the German comments):
        temperatures are already unique per process for hot/cold; from the
        per-process H values and temperatures, segment slopes are computed
        in order; for each site-wide temperature interval the matching
        per-process slope is selected, the slopes are summed, multiplied by
        the interval width, and accumulated onto the previous profile value.
        """

        # Collect all hot temperatures over all processes, remembering for
        # each unique temperature which process/stream indices produced it.
        for i in range(len(self.splitdict['HotTemperatures'])):
            for j in range(len(self.splitdict['HotTemperatures'][i])):
                self.tstHotTemperatures.append(self.splitdict['HotTemperatures'][i][j])

                if self.splitdict['HotTemperatures'][i][j] not in self.tstHotConstructionAid['T']:
                    self.tstHotConstructionAid['T'].append(self.splitdict['HotTemperatures'][i][j])
                    self.tstHotConstructionAid['Process'].append([i])
                    self.tstHotConstructionAid['Stream'].append([j])

                elif self.splitdict['HotTemperatures'][i][j] in self.tstHotConstructionAid['T']:
                    temp = self.tstHotConstructionAid['T'].index(self.splitdict['HotTemperatures'][i][j])
                    self.tstHotConstructionAid['Process'][temp].append(i)
                    self.tstHotConstructionAid['Stream'][temp].append(j)

        self.tstHotTemperatures = list(set(self.tstHotTemperatures))
        self.tstHotTemperatures.sort()

        # Same collection pass for the cold temperatures.
        for i in range(len(self.splitdict['ColdTemperatures'])):
            for j in range(len(self.splitdict['ColdTemperatures'][i])):
                self.tstColdTemperatures.append(self.splitdict['ColdTemperatures'][i][j])

                if self.splitdict['ColdTemperatures'][i][j] not in self.tstColdConstructionAid['T']:
                    self.tstColdConstructionAid['T'].append(self.splitdict['ColdTemperatures'][i][j])
                    self.tstColdConstructionAid['Process'].append([i])
                    self.tstColdConstructionAid['Stream'].append([j])

                elif self.splitdict['ColdTemperatures'][i][j] in self.tstColdConstructionAid['T']:
                    temp = self.tstColdConstructionAid['T'].index(self.splitdict['ColdTemperatures'][i][j])
                    self.tstColdConstructionAid['Process'][temp].append(i)
                    self.tstColdConstructionAid['Stream'][temp].append(j)

        self.tstColdTemperatures = list(set(self.tstColdTemperatures))
        self.tstColdTemperatures.sort()

        # Slopes (kW per degC) of each cold segment, per process; zero
        # slope for zero-width or decreasing-H segments.
        Steigung = []
        for Prozess in range(len(self.splitdict['ColdTemperatures'])):
            for Temperatur in range(len(self.splitdict['ColdTemperatures'][Prozess])-1):
                if self.splitdict['ColdTemperatures'][Prozess][Temperatur]-self.splitdict['ColdTemperatures'][Prozess][Temperatur+1] == 0:
                    Steigung.append(0.0)
                elif self.splitdict['ColdH'][Prozess][Temperatur]-self.splitdict['ColdH'][Prozess][Temperatur+1]<0:
                    Steigung.append(0.0)
                else:
                    Steigung.append((self.splitdict['ColdH'][Prozess][Temperatur]-self.splitdict['ColdH'][Prozess][Temperatur+1])/(self.splitdict['ColdTemperatures'][Prozess][Temperatur]-self.splitdict['ColdTemperatures'][Prozess][Temperatur+1]))
            self.splitdict['SteigungCold'].append(Steigung)
            Steigung = []

        # Slopes of each hot segment, per process; zero for increasing H.
        Steigung = []
        for Prozess in range(len(self.splitdict['HotTemperatures'])):
            for Temperatur in range(len(self.splitdict['HotTemperatures'][Prozess])-1):
                if self.splitdict['HotH'][Prozess][Temperatur]-self.splitdict['HotH'][Prozess][Temperatur+1]>0:
                    Steigung.append(0.0)
                else:
                    Steigung.append((self.splitdict['HotH'][Prozess][Temperatur]-self.splitdict['HotH'][Prozess][Temperatur+1])/(self.splitdict['HotTemperatures'][Prozess][Temperatur]-self.splitdict['HotTemperatures'][Prozess][Temperatur+1]))
            self.splitdict['SteigungHot'].append(Steigung)
            Steigung = []

        # Accumulate the hot profile: for each site-wide temperature, sum
        # the contribution of every process segment that spans it.
        # NOTE(review): `letzteTemperaturHot` ("last hot temperature") is
        # first assigned at the end of an iteration but is read inside the
        # inner branches; if such a branch executes on the very first
        # temperature this raises NameError — TODO confirm input data
        # guarantees prevent that.
        kW = 0.0
        for Temperatur in self.tstHotTemperatures:
            for Prozess in reversed(range(len(self.splitdict['HotTemperatures']))):
                if self.splitdict['HotTemperatures'][Prozess] == []:
                    continue
                elif self.splitdict['HotTemperatures'][Prozess][-1] >= Temperatur:
                    continue

                for Prozesstemperatur in reversed(range(len(self.splitdict['HotTemperatures'][Prozess]))):
                    if Temperatur == self.splitdict['HotTemperatures'][Prozess][Prozesstemperatur]:
                        if letzteTemperaturHot > self.splitdict['HotTemperatures'][Prozess][Prozesstemperatur+1] and letzteTemperaturHot < self.splitdict['HotTemperatures'][Prozess][Prozesstemperatur]:
                            kW += self.splitdict['SteigungHot'][Prozess][Prozesstemperatur] * (self.splitdict['HotTemperatures'][Prozess][Prozesstemperatur] - letzteTemperaturHot)
                            break
                        else:
                            kW += self.splitdict['SteigungHot'][Prozess][Prozesstemperatur] * (Temperatur - self.splitdict['HotTemperatures'][Prozess][Prozesstemperatur+1])
                            break

                    elif Temperatur > self.splitdict['HotTemperatures'][Prozess][Prozesstemperatur] and Temperatur < self.splitdict['HotTemperatures'][Prozess][Prozesstemperatur-1]:
                        if letzteTemperaturHot > self.splitdict['HotTemperatures'][Prozess][Prozesstemperatur] and letzteTemperaturHot < self.splitdict['HotTemperatures'][Prozess][Prozesstemperatur-1]:
                            kW += self.splitdict['SteigungHot'][Prozess][Prozesstemperatur-1] * (Temperatur - letzteTemperaturHot)
                        else:
                            kW += self.splitdict['SteigungHot'][Prozess][Prozesstemperatur-1] * (Temperatur - self.splitdict['HotTemperatures'][Prozess][Prozesstemperatur])
                        break
                    else:
                        continue
            if self.tstHotH == []:
                self.tstHotH.append(kW)
            else:
                self.tstHotH.append(self.tstHotH[-1] + kW)
            kW = 0.0
            letzteTemperaturHot = Temperatur

        # Accumulate the cold profile (same scheme as the hot loop above;
        # the same NameError caveat applies to `letzteTemperaturCold`).
        kW = 0.0
        for Temperatur in self.tstColdTemperatures:
            for Prozess in reversed(range(len(self.splitdict['ColdTemperatures']))):
                if self.splitdict['ColdTemperatures'][Prozess][-1] >= Temperatur:
                    continue

                for Prozesstemperatur in reversed(range(len(self.splitdict['ColdTemperatures'][Prozess]))):
                    if Temperatur == self.splitdict['ColdTemperatures'][Prozess][Prozesstemperatur]:
                        if letzteTemperaturCold > self.splitdict['ColdTemperatures'][Prozess][Prozesstemperatur+1] and letzteTemperaturCold < self.splitdict['ColdTemperatures'][Prozess][Prozesstemperatur]:
                            kW += self.splitdict['SteigungCold'][Prozess][Prozesstemperatur] * (self.splitdict['ColdTemperatures'][Prozess][Prozesstemperatur] - letzteTemperaturCold)
                            break
                        else:
                            kW += self.splitdict['SteigungCold'][Prozess][Prozesstemperatur] * (Temperatur - self.splitdict['ColdTemperatures'][Prozess][Prozesstemperatur+1])
                            break

                    elif Temperatur > self.splitdict['ColdTemperatures'][Prozess][Prozesstemperatur] and Temperatur < self.splitdict['ColdTemperatures'][Prozess][Prozesstemperatur-1]:
                        if letzteTemperaturCold > self.splitdict['ColdTemperatures'][Prozess][Prozesstemperatur] and letzteTemperaturCold < self.splitdict['ColdTemperatures'][Prozess][Prozesstemperatur-1]:
                            kW += self.splitdict['SteigungCold'][Prozess][Prozesstemperatur-1] * (Temperatur - letzteTemperaturCold)
                        else:
                            kW += self.splitdict['SteigungCold'][Prozess][Prozesstemperatur-1] * (Temperatur - self.splitdict['ColdTemperatures'][Prozess][Prozesstemperatur])
                        break
                    else:
                        continue
            if self.tstColdH == []:
                self.tstColdH.append(kW)
            else:
                self.tstColdH.append(self.tstColdH[-1] + kW)
            kW = 0.0
            letzteTemperaturCold = Temperatur

        # Re-base the hot profile so its last point is zero, then mirror it
        # to negative enthalpies (hot profile is drawn left of the origin).
        maxheiß = self.tstHotH[-1]
        self.tstHotH = [wert - maxheiß for wert in self.tstHotH]
        a = 1  # NOTE(review): apparently leftover debug value; never read here
        for i in range(len(self.tstHotH)):
            self.tstHotH[i] = -self.tstHotH[i]
        if self._options['draw'] == True:
            TSPPlot().drawTotalSiteProfile(self.siteDesignation, self.tstHotH,
            self.tstHotTemperatures, self.tstColdH, self.tstColdTemperatures, localisation)
222
+
pinch_tool/Modules/Utility/TemperaturePocketDeletion.py ADDED
@@ -0,0 +1,280 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from Modules.TotalSiteProfile.TSPPlot import TSPPlot
2
+
3
+ class TemperaturePocketDeletion:
4
+ def __init__(self, hotUtility, heatCascade, _temperatures):
5
+ self.hotUtility = hotUtility
6
+ self.heatCascade = heatCascade
7
+ self._temperatures = _temperatures
8
+ self._options = {'draw': True}
9
+
10
def deleteDoubleEmpty(self,u):
    """Collapse two consecutive zero-deltaH intervals after position *u*
    into one, removing the redundant boundary.

    The index offset differs on purpose: heatCascadeexitH and
    _temperatures carry one extra leading entry (the hot utility / top
    temperature) relative to heatCascadedeltaH, so deltaH index u+1
    corresponds to exitH/temperature index u+2.
    """
    if u < len(self.heatCascadedeltaH)-2:
        if self.heatCascadedeltaH[u+1] == 0.0 and self.heatCascadedeltaH[u+2] == 0.0:
            self.heatCascadedeltaH.pop(u+1)
            self.heatCascadeexitH.pop(u+2)
            self._temperatures.pop(u+2)
16
+
17
+ def deleteTemperaturePockets(self):
18
+ self.deletedPocketdict = {'H': [], 'deltaH': [], 'T': []}
19
+ self.deletedPocketdictlist=[]
20
+
21
+ i = 0
22
+ j = 0
23
+ k = 0
24
+ u = 0
25
+ plottest = 0
26
+ self.heatCascadedeltaH = []
27
+ self.heatCascadeexitH = []
28
+ self.heatCascadeexitH.append(self.hotUtility)
29
+
30
+ for o in range(len(self.heatCascade)):
31
+ self.heatCascadedeltaH.append(self.heatCascade[o]['deltaH'])
32
+ self.heatCascadeexitH.append(self.heatCascade[o]['exitH'])
33
+
34
+ for i in range(len(self.heatCascadeexitH)-1):
35
+ if self.heatCascadeexitH[i] <= 1e-22:
36
+ j = i
37
+ k = i
38
+ break
39
+ if j == 0 and k == 0:
40
+ if self.heatCascadeexitH[-1] <= 1e-22:
41
+ k = len(self.heatCascadeexitH)-1
42
+ j = len(self.heatCascadeexitH)-1
43
+ plottest = 1
44
+ elif self.heatCascadeexitH[0] <= 1e-22:
45
+ j = 0
46
+ while j < len(self.heatCascadeexitH)-1:
47
+ if self.heatCascadedeltaH[j] == 0:
48
+ j += 1
49
+
50
+ if j >= len(self.heatCascadedeltaH)-1:
51
+ break
52
+
53
+ if self.heatCascadedeltaH[j] > 0:
54
+ if self.heatCascadedeltaH[j + 1] < 0:
55
+ if abs(self.heatCascadedeltaH[j+1]) < abs(self.heatCascadedeltaH[j]):
56
+ self._temperatures[j+1] = self._temperatures[j] + ((self._temperatures[j]-self._temperatures[j+1]) / (self.heatCascadeexitH[j] - self.heatCascadeexitH[j+1]))* (self.heatCascadeexitH[j+2]) # lineare regression der Temperatur
57
+ self.heatCascadedeltaH[j] = self.heatCascadeexitH[j+2] - self.heatCascadeexitH[j]
58
+ self.heatCascadeexitH[j+1]=self.heatCascadeexitH[j+2]
59
+ self.heatCascadedeltaH[j+1] = 0.0
60
+ self.deleteDoubleEmpty(j)
61
+ if self.heatCascadedeltaH[-1] == 0.0:
62
+ self.heatCascadedeltaH.pop()
63
+ self._temperatures.pop()
64
+ self.heatCascadeexitH.pop()
65
+ break
66
+ j = i
67
+ elif abs(self.heatCascadedeltaH[j+1]) > abs(self.heatCascadedeltaH[j]):
68
+ self._temperatures[j+1] = self._temperatures[j+2] + ((self._temperatures[j+1]-self._temperatures[j+2]) / (self.heatCascadeexitH[j+1] - self.heatCascadeexitH[j+2]))* (self.heatCascadeexitH[j] - self.heatCascadeexitH[j+2]) # lineare regression der Temperatur
69
+ self.heatCascadeexitH[j+1]=self.heatCascadeexitH[j]
70
+ self.heatCascadedeltaH[j] = 0.0
71
+ self.deleteDoubleEmpty(j)
72
+ if self.heatCascadedeltaH[-1] == 0.0:
73
+ self.heatCascadedeltaH.pop()
74
+ self._temperatures.pop()
75
+ self.heatCascadeexitH.pop()
76
+ break
77
+ j=i
78
+ else:
79
+ self.heatCascadedeltaH = 0.0
80
+ self.heatCascadedeltaH.pop(j+1)
81
+ self.heatCascadeexitH.pop(j+1)
82
+ self._temperatures.pop(j+1)
83
+ self.deleteDoubleEmpty(j)
84
+ if self.heatCascadedeltaH[-1] == 0.0:
85
+ self.heatCascadedeltaH.pop()
86
+ self._temperatures.pop()
87
+ self.heatCascadeexitH.pop()
88
+ break
89
+ j=i
90
+ elif self.heatCascadedeltaH[j+1] == 0:
91
+ if self.heatCascadedeltaH[j+2]<0:
92
+ if abs(self.heatCascadedeltaH[j+2]) > abs(self.heatCascadedeltaH[j]):
93
+ self._temperatures[j+2] = self._temperatures[j+3] + ((self._temperatures[j+2]-self._temperatures[j+3]) / (self.heatCascadeexitH[j+2] - self.heatCascadeexitH[j+3]))* (self.heatCascadeexitH[j]-self.heatCascadeexitH[j+3])
94
+ self.heatCascadeexitH[j+2] = self.heatCascadeexitH[j]
95
+ self.heatCascadedeltaH[j+2] = self.heatCascadedeltaH[j+2] + self.heatCascadedeltaH[j]
96
+ self._temperatures.pop(j+1)
97
+ self.heatCascadeexitH.pop(j+1)
98
+ self.heatCascadedeltaH.pop(j)
99
+ self.deleteDoubleEmpty(j)
100
+ if self.heatCascadedeltaH[-1] == 0.0:
101
+ self.heatCascadedeltaH.pop()
102
+ self._temperatures.pop()
103
+ self.heatCascadeexitH.pop()
104
+ break
105
+ j=i
106
+ elif abs(self.heatCascadedeltaH[j+2]) < abs(self.heatCascadedeltaH[j]):
107
+ self._temperatures[j+1] = self._temperatures[j+1] - ((self._temperatures[j]-self._temperatures[j+1]) / (self.heatCascadeexitH[j] - self.heatCascadeexitH[j+1]))* (self.heatCascadeexitH[j+1]-self.heatCascadeexitH[j+3]) # lineare regression der Temperatur
108
+ self.heatCascadeexitH[j+1] = self.heatCascadeexitH[j+3]
109
+ self.heatCascadeexitH.pop(j+2)
110
+ self.heatCascadedeltaH[j] = self.heatCascadedeltaH[j] + self.heatCascadedeltaH[j+2]
111
+ self.heatCascadedeltaH.pop(j+2)
112
+ self._temperatures.pop(j+2)
113
+ self.deleteDoubleEmpty(j)
114
+ if self.heatCascadedeltaH[-1] == 0.0:
115
+ self.heatCascadedeltaH.pop()
116
+ self._temperatures.pop()
117
+ self.heatCascadeexitH.pop()
118
+ break
119
+ j=i
120
+ else:
121
+ self.heatCascadedeltaH.pop(j+1)
122
+ self._temperatures.pop(j+1)
123
+ self.heatCascadeexitH.pop(j+1)
124
+ self.heatCascadedeltaH.pop(j+1)
125
+ self._temperatures.pop(j+1)
126
+ self.heatCascadeexitH.pop(j+1)
127
+ self.heatCascadedeltaH[j] = 0.0
128
+ self.deleteDoubleEmpty(j)
129
+ if self.heatCascadedeltaH[-1] == 0.0:
130
+ self.heatCascadedeltaH.pop()
131
+ self._temperatures.pop()
132
+ self.heatCascadeexitH.pop()
133
+ break
134
+ j=i
135
+ else:
136
+ j+=1
137
+ else:
138
+ j += 1
139
+ else:
140
+ j += 1
141
+ while u < k-1:
142
+ if u >= len(self.heatCascadedeltaH)-1:
143
+ break
144
+ #unterscheidung einfügen ob u=0
145
+ if self.heatCascadedeltaH[u] > 0 and u == 0: #löscht nur den obersten, wenn der oberste direkt dran ist
146
+ if self.heatCascadedeltaH[u+1] < 0:
147
+ if abs(self.heatCascadedeltaH[u+1]) > abs(self.heatCascadedeltaH[u]):
148
+ #kalt größer heiß
149
+ self._temperatures[u+1] = self._temperatures[u+2] + ((self._temperatures[u+1]-self._temperatures[u+2]) / (self.heatCascadeexitH[u+1] - self.heatCascadeexitH[u+2]))* (self.heatCascadeexitH[u]-self.heatCascadeexitH[u+2]) # lineare regression der Temperatur
150
+ self.heatCascadedeltaH[u] = self.heatCascadeexitH[u+2] - self.heatCascadeexitH[u]
151
+ self._temperatures.pop(u)
152
+ self.heatCascadeexitH.pop(u+1)
153
+ self.heatCascadedeltaH.pop(u+1)
154
+ self.deleteDoubleEmpty(u)
155
+ u = 0
156
+ elif abs(self.heatCascadedeltaH[u+1]) < abs(self.heatCascadedeltaH[u]):
157
+ self._temperatures[u+1] = self._temperatures[u] + ((self._temperatures[u+1]-self._temperatures[u]) / (self.heatCascadeexitH[u+1] - self.heatCascadeexitH[u]))* (self.heatCascadeexitH[u+2]-self.heatCascadeexitH[u]) # lineare regression der Temperatur
158
+ self.heatCascadedeltaH[u] = self.heatCascadeexitH[u] + self.heatCascadeexitH[u+1]
159
+ self.heatCascadeexitH[u+1] = self.heatCascadeexitH[u+2]
160
+ self.heatCascadedeltaH[u+1] = 0.0
161
+ self.deleteDoubleEmpty(u)
162
+ u = 0
163
+ else:
164
+ self._temperatures.pop(u)
165
+ self.heatCascadeexitH.pop(u)
166
+ self.heatCascadedeltaH.pop(u)
167
+ self._temperatures.pop(u)
168
+ self.heatCascadeexitH.pop(u)
169
+ self.heatCascadedeltaH.pop(u)
170
+ u=0
171
+ elif self.heatCascadedeltaH[u+1] == 0:
172
+ if self.heatCascadedeltaH[u+2]<0:
173
+ if abs(self.heatCascadedeltaH[u+2]) > abs(self.heatCascadedeltaH[u]):
174
+ self._temperatures[u+2] = self._temperatures[u+3] + ((self._temperatures[u+2]-self._temperatures[u+3]) / (self.heatCascadeexitH[u+2] - self.heatCascadeexitH[u+3]))* (self.heatCascadeexitH[u]-self.heatCascadeexitH[u+3])
175
+ self.heatCascadeexitH[u+2] = self.heatCascadeexitH[u]
176
+ self.heatCascadedeltaH[u+2] = self.heatCascadedeltaH[u+2] + self.heatCascadedeltaH[u]
177
+ self._temperatures.pop(u+1)
178
+ self.heatCascadeexitH.pop(u+1)
179
+ self.heatCascadedeltaH.pop(u)
180
+ self.deleteDoubleEmpty(u)
181
+ self._temperatures.pop(u)
182
+ self.heatCascadedeltaH.pop(u)
183
+ self.heatCascadeexitH.pop(u)
184
+ u=0
185
+ elif abs(self.heatCascadedeltaH[u+2]) < abs(self.heatCascadedeltaH[u]):
186
+ self._temperatures[u+1] = self._temperatures[u+1] - ((self._temperatures[u]-self._temperatures[u+1]) / (self.heatCascadeexitH[u] - self.heatCascadeexitH[u+1]))* (self.heatCascadeexitH[u+1]-self.heatCascadeexitH[u+3]) # lineare regression der Temperatur
187
+ self.heatCascadeexitH[u+1] = self.heatCascadeexitH[u+3]
188
+ self.heatCascadeexitH.pop(u+2)
189
+ self.heatCascadedeltaH[u] = self.heatCascadedeltaH[u] + self.heatCascadedeltaH[u+2]
190
+ self.heatCascadedeltaH.pop(u+2)
191
+ self._temperatures.pop(u+2)
192
+ self.deleteDoubleEmpty(u)
193
+ self._temperatures.pop(u)
194
+ self.heatCascadedeltaH.pop(u)
195
+ self.heatCascadeexitH.pop(u)
196
+ u=0
197
+ else:
198
+ self._temperatures.pop(u)
199
+ self.heatCascadeexitH.pop(u)
200
+ self.heatCascadedeltaH.pop(u)
201
+ self._temperatures.pop(u)
202
+ self.heatCascadeexitH.pop(u)
203
+ self.heatCascadedeltaH.pop(u)
204
+ self._temperatures.pop(u)
205
+ self.heatCascadeexitH.pop(u)
206
+ self.heatCascadedeltaH.pop(u)
207
+ u=0
208
+ else:
209
+ u+=1
210
+ else:
211
+ u += 1
212
+ elif self.heatCascadedeltaH[u] > 0 and u != 0:
213
+ if self.heatCascadedeltaH[u + 1] < 0:
214
+ if abs(self.heatCascadedeltaH[u+1]) > abs(self.heatCascadedeltaH[u]):
215
+ self._temperatures[u+1] = self._temperatures[u+2] + ((self._temperatures[u+1]-self._temperatures[u+2]) / (self.heatCascadeexitH[u+1] - self.heatCascadeexitH[u+2]))* (self.heatCascadeexitH[u]-self.heatCascadeexitH[u+2]) # lineare regression der Temperatur
216
+ self.heatCascadedeltaH[u+1] = self.heatCascadedeltaH[u+1] + self.heatCascadedeltaH[u]
217
+ self.heatCascadeexitH[u+1] = self.heatCascadeexitH[u]
218
+ self.heatCascadedeltaH[u] = 0.0
219
+ self.deleteDoubleEmpty(u)
220
+ u = 0
221
+ elif abs(self.heatCascadedeltaH[u+1]) < abs(self.heatCascadedeltaH[u]):
222
+ self._temperatures[u+1] = self._temperatures[u] + ((self._temperatures[u+1]-self._temperatures[u]) / (self.heatCascadeexitH[u+1] - self.heatCascadeexitH[u]))* (self.heatCascadeexitH[u+2]-self.heatCascadeexitH[u]) # lineare regression der Temperatur
223
+ self.heatCascadedeltaH[u] = self.heatCascadeexitH[u] + self.heatCascadeexitH[u+1]
224
+ self.heatCascadeexitH[u+1] = self.heatCascadeexitH[u+2]
225
+ self.heatCascadedeltaH[u+1] = 0.0
226
+ self.deleteDoubleEmpty(u)
227
+ u = 0 # selbes wie u==0
228
+ else:
229
+ self._temperatures.pop(u+1)
230
+ self.heatCascadeexitH.pop(u+1)
231
+ self.heatCascadedeltaH[u] = 0.0
232
+ self.heatCascadedeltaH.pop(u+1)
233
+ self.deleteDoubleEmpty(u)
234
+ u=0
235
+ elif self.heatCascadedeltaH[u + 1] == 0:
236
+ if self.heatCascadedeltaH[u+2]<0:
237
+ if abs(self.heatCascadedeltaH[u+2]) > abs(self.heatCascadedeltaH[u]):
238
+ self._temperatures[u+2] = self._temperatures[u+3] + ((self._temperatures[u+2]-self._temperatures[u+3]) / (self.heatCascadeexitH[u+2] - self.heatCascadeexitH[u+3]))* (self.heatCascadeexitH[u]-self.heatCascadeexitH[u+3])
239
+ self.heatCascadeexitH[u+2] = self.heatCascadeexitH[u]
240
+ self.heatCascadedeltaH[u+2] = self.heatCascadedeltaH[u+2] + self.heatCascadedeltaH[u]
241
+ self._temperatures.pop(u+1)
242
+ self.heatCascadeexitH.pop(u+1)
243
+ self.heatCascadedeltaH.pop(u)
244
+ self.deleteDoubleEmpty(u)
245
+ u=0
246
+ elif abs(self.heatCascadedeltaH[u+2]) < abs(self.heatCascadedeltaH[u]):
247
+ self._temperatures[u+1] = self._temperatures[u+1] - ((self._temperatures[u]-self._temperatures[u+1]) / (self.heatCascadeexitH[u] - self.heatCascadeexitH[u+1]))* (self.heatCascadeexitH[u+1]-self.heatCascadeexitH[u+3]) # lineare regression der Temperatur
248
+ self.heatCascadeexitH[u+1] = self.heatCascadeexitH[u+3]
249
+ self.heatCascadeexitH.pop(u+2)
250
+ self.heatCascadedeltaH[u] = self.heatCascadedeltaH[u] + self.heatCascadedeltaH[u+2]
251
+ self.heatCascadedeltaH.pop(u+2)
252
+ self._temperatures.pop(u+2)
253
+ self.deleteDoubleEmpty(u)
254
+ u=0
255
+ else:
256
+ self._temperatures.pop(u+1)
257
+ self.heatCascadeexitH.pop(u+1)
258
+ self._temperatures.pop(u+1)
259
+ self.heatCascadeexitH.pop(u+1)
260
+ self.heatCascadedeltaH[u] = 0.0
261
+ self.heatCascadedeltaH.pop(u+1)
262
+ self.heatCascadedeltaH.pop(u+1)
263
+ self.deleteDoubleEmpty(u)
264
+ u=0
265
+ else:
266
+ u+=1
267
+ else:
268
+ u+=1
269
+ else:
270
+ u+=1
271
+
272
+ self.deletedPocketdict['H'].append(self.heatCascadeexitH)
273
+ self.deletedPocketdict['deltaH'].append(self.heatCascadedeltaH)
274
+ self.deletedPocketdict['T'].append(self._temperatures)
275
+ self.deletedPocketdictlist.append(self.deletedPocketdict)
276
+
277
+ if self._options['draw'] == True:
278
+ TSPPlot.drawDeletedCurve(self, self.heatCascadedeltaH, self.deletedPocketdict, self._temperatures, plottest)
279
+
280
+ return self.deletedPocketdict
pinch_tool/Modules/Utility/Thermodynamic_Properties.py ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from CoolProp.CoolProp import PropsSI
2
+
3
class ThermodynamicProperties():
    """Saturation-state water/steam property helpers based on CoolProp.

    Temperatures are given in degrees Celsius; enthalpies are returned in
    kJ/kg and specific volumes in m3/kg.

    All helpers are static: the original definitions had no ``self``
    parameter, so calling them on an instance would have passed the
    instance as the temperature argument. ``@staticmethod`` makes both
    ``ThermodynamicProperties.get_hprime(T)`` and instance calls work.
    """

    @staticmethod
    def get_hprime(T, fluid='Water'):
        """Return the saturated-liquid enthalpy h' at T [deg C] in kJ/kg."""
        TK = float(T) + 273.15  # CoolProp expects Kelvin
        return PropsSI('H', 'T', TK, 'Q', 0, fluid) / 1000  # J/kg -> kJ/kg

    @staticmethod
    def get_hdouble_prime(T, fluid='Water'):
        """Return the saturated-vapour enthalpy h'' at T [deg C] in kJ/kg."""
        TK = float(T) + 273.15  # CoolProp expects Kelvin
        return PropsSI('H', 'T', TK, 'Q', 1, fluid) / 1000  # J/kg -> kJ/kg

    @staticmethod
    def get_vprime(T, fluid='Water'):
        """Return the saturated-liquid specific volume v' at T [deg C] in m3/kg."""
        TK = float(T) + 273.15  # CoolProp expects Kelvin
        rho_liq = PropsSI('D', 'T', TK, 'Q', 0, fluid)  # liquid density, kg/m3
        return 1 / rho_liq  # specific volume is the inverse density

    @staticmethod
    def get_latentheat(T, fluid='Water'):
        """Return the latent heat of vaporisation h'' - h' at T [deg C] in kJ/kg."""
        # Reuse the two enthalpy helpers instead of duplicating the PropsSI calls.
        return (ThermodynamicProperties.get_hdouble_prime(T, fluid)
                - ThermodynamicProperties.get_hprime(T, fluid))
21
+
pinch_tool/Modules/Utility/splitStreams.py ADDED
@@ -0,0 +1,105 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+
3
class splitStreams():
    """Split a merged (pocket-free) composite profile into separate hot and
    cold composite curves.

    The sign convention of ``deletedPocketdict['deltaH']`` decides the
    assignment: positive segments belong to the hot profile, negative
    segments to the cold profile, zero-duty segments are attached to the
    curve of the preceding segment.
    """

    def __init__(self, deletedPocketdict, splitdict):
        # Profile data from the pocket-deletion step: keys 'T', 'H',
        # 'deltaH', each a list of per-profile lists.
        self.deletedPocketdict = deletedPocketdict
        # Accumulator dict the split results are appended to.
        self.splitdict = splitdict

    def splitHotandCold(self):
        """Walk every profile segment and sort it into hot/cold curves.

        Returns:
            dict: ``self.splitdict`` with one new list appended per key
            ('HotTemperatures', 'HotH', 'HotdeltaH', 'ColdTemperatures',
            'ColdH', 'ColddeltaH').
        """
        self.splitHotTemperatures = []
        self.splitColdTemperatures = []
        self.splitHotH = []
        self.splitColdH = []
        testHot = 0   # becomes 1 once the first hot segment has been stored
        testCold = 0  # becomes 1 once the first cold segment has been stored

        for i in range(len(self.deletedPocketdict['T'])):
            temps = self.deletedPocketdict['T'][i]
            enth = self.deletedPocketdict['H'][i]
            dH = self.deletedPocketdict['deltaH'][i]
            for j in range(len(temps)):
                # 'deltaH' holds one entry per segment, i.e. one less than
                # the number of temperature points; skip the overhang.
                if j >= len(dH):
                    continue
                if dH[j] > 0 and testHot == 0:
                    # First hot segment: record both of its end points.
                    self.splitHotTemperatures.append(temps[j])
                    self.splitHotH.append(enth[j])
                    self.splitHotTemperatures.append(temps[j+1])
                    self.splitHotH.append(enth[j+1])
                    testHot = 1

                elif dH[j] > 0 and testHot == 1:
                    if j == len(dH) - 1:
                        # Last segment of the profile: close the hot curve.
                        self.splitHotTemperatures.append(temps[j+1])
                        self.splitHotH.append(enth[j+1])
                    elif dH[j+1] < 0:
                        # A cold segment follows: repeat the last enthalpy to
                        # keep the curve continuous, then add this segment's
                        # duty on top of it.
                        self.splitHotTemperatures.append(temps[j])
                        self.splitHotH.append(self.splitHotH[-1])
                        self.splitHotTemperatures.append(temps[j+1])
                        self.splitHotH.append(self.splitHotH[-1] + dH[j])  # TODO(review): verify offset handling
                    else:
                        self.splitHotTemperatures.append(temps[j+1])
                        self.splitHotH.append(enth[j+1])

                elif dH[j] < 0 and testCold == 0:
                    # First cold segment: record both of its end points.
                    self.splitColdTemperatures.append(temps[j])
                    self.splitColdH.append(enth[j])
                    self.splitColdTemperatures.append(temps[j+1])
                    self.splitColdH.append(enth[j+1])
                    testCold = 1

                elif dH[j] < 0 and testCold == 1:
                    if j == len(dH) - 1:
                        if self.splitColdH[-1] < 0:
                            self.splitColdTemperatures.append(temps[j+1])
                            self.splitColdH.append(dH[j] + self.splitColdH[-1])
                        else:
                            self.splitColdTemperatures.append(temps[j+1])
                            self.splitColdH.append(enth[j+1])
                    # BUGFIX: the original tested the bare value dH[j-1]
                    # (truthiness) instead of its sign, so any non-zero
                    # previous segment - including cold ones - took this
                    # branch. Compare the sign explicitly.
                    elif dH[j+1] > 0 or dH[j-1] > 0:
                        self.splitColdTemperatures.append(temps[j])
                        self.splitColdH.append(self.splitColdH[-1])
                        self.splitColdTemperatures.append(temps[j+1])
                        self.splitColdH.append(dH[j] + self.splitColdH[-1])  # TODO(review): verify offset handling
                    else:
                        if self.splitColdH[-1] < 0:
                            self.splitColdTemperatures.append(temps[j+1])
                            self.splitColdH.append(dH[j] + self.splitColdH[-1])
                        else:
                            self.splitColdTemperatures.append(temps[j+1])
                            self.splitColdH.append(enth[j+1])

                elif dH[j] == 0:
                    # Zero-duty segment: attach its upper end point to the
                    # curve the previous segment belonged to.
                    if dH[j-1] < 0:
                        self.splitColdTemperatures.append(temps[j+1])
                        self.splitColdH.append(enth[j+1])
                    elif dH[j-1] > 0:
                        self.splitHotTemperatures.append(temps[j+1])
                        self.splitHotH.append(enth[j+1])
                    else:
                        pass
                else:
                    pass

        # Segment duties as successive enthalpy differences.
        # (The original also had several bare attribute expressions here,
        # e.g. `self.splitHotTemperatures` on its own line - pure no-ops,
        # removed.)
        self.splitColddeltaH = []
        self.splitHotdeltaH = []
        for i in range(len(self.splitColdH) - 1):
            self.splitColddeltaH.append(self.splitColdH[i+1] - self.splitColdH[i])

        for i in range(len(self.splitHotH) - 1):
            self.splitHotdeltaH.append(self.splitHotH[i+1] - self.splitHotH[i])

        self.splitdict['HotTemperatures'].append(self.splitHotTemperatures)
        self.splitdict['HotH'].append(self.splitHotH)
        self.splitdict['HotdeltaH'].append(self.splitHotdeltaH)
        self.splitdict['ColdTemperatures'].append(self.splitColdTemperatures)
        self.splitdict['ColdH'].append(self.splitColdH)
        self.splitdict['ColddeltaH'].append(self.splitColddeltaH)

        return self.splitdict
pinch_tool/Output/.gitignore ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ *
2
+ !.gitignore
pinch_tool/Pinch_main.py ADDED
@@ -0,0 +1,53 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import csv
2
+ from Modules.Pinch.Pinch import Pinch
3
+ from Modules.Pinch.PinchPlot import PinchPlot
4
+
5
class Pinchmain():
    """High-level driver for a single pinch analysis run.

    Wraps a ``Pinch`` instance and provides entry points for the full
    stand-alone analysis as well as reduced runs used by the ISSP and HPI
    workflows.
    """

    def __init__(self, CSV, options=None):
        """Create the underlying Pinch solver.

        Args:
            CSV: Path of the stream-data CSV file.
            options: Optional collection of option flags (e.g. 'draw',
                'csv') forwarded to the Pinch solver.
        """
        # Avoid the mutable-default-argument pitfall: the original used
        # `options={}`, a single dict shared across all instances.
        if options is None:
            options = {}
        self.PinchAnalyse = Pinch(CSV, options)
        # NOTE(review): set but never read in this module - confirm no
        # external callers rely on it before removing.
        self._options = {}

    def solvePinch(self, localisation='DE'):
        """Run the complete pinch analysis and write the TSP buffer CSV.

        Args:
            localisation: Language/locale code used for diagram labels.
        """
        self.PinchAnalyse.shiftTemperatures()
        self.PinchAnalyse.constructTemperatureInterval()
        self.PinchAnalyse.constructProblemTable()
        self.PinchAnalyse.constructHeatCascade()
        self.PinchAnalyse.constructShiftedCompositeDiagram(localisation)
        self.PinchAnalyse.constructCompositeDiagram(localisation)
        self.PinchAnalyse.constructGrandCompositeCurve(localisation)

        # Persist intermediate results for the TotalSiteProfile step.
        with open("Buffer file for TotalSiteProfile creation.csv", "w", newline="") as csvfile:
            self.newstreamsdata = csv.writer(csvfile)
            self.newstreamsdata.writerow(self.PinchAnalyse._temperatures)
            self.newstreamsdata.writerow(self.PinchAnalyse.heatCascade)
            self.newstreamsdata.writerow([self.PinchAnalyse.hotUtility])

        if self.PinchAnalyse._options['draw'] == True:
            PinchPlot.showPlots()

    def solvePinchforISSP(self, localisation='DE'):
        """Run the analysis up to the shifted composite diagram.

        Returns:
            list: ``[shifted composite diagram, Pinch instance]``.
        """
        self.PinchAnalyse.shiftTemperatures()
        self.PinchAnalyse.constructTemperatureInterval()
        self.PinchAnalyse.constructProblemTable()
        self.PinchAnalyse.constructHeatCascade()
        self.PinchAnalyse.constructShiftedCompositeDiagram(localisation)

        return [self.PinchAnalyse.shiftedCompositeDiagram, self.PinchAnalyse]

    def solvePinchforHPI(self, localisation='DE'):
        """Run the full analysis (no CSV export) and return the solver.

        Returns:
            Pinch: The fully populated Pinch instance.
        """
        self.PinchAnalyse.shiftTemperatures()
        self.PinchAnalyse.constructTemperatureInterval()
        self.PinchAnalyse.constructProblemTable()
        self.PinchAnalyse.constructHeatCascade()
        self.PinchAnalyse.constructShiftedCompositeDiagram(localisation)
        self.PinchAnalyse.constructCompositeDiagram(localisation)
        self.PinchAnalyse.constructGrandCompositeCurve(localisation)

        return self.PinchAnalyse
51
+
52
+ #Pinchmain('Example.csv', options={'draw', 'csv'}).solvePinch()
53
+ #Pinchmain('Prozess_neu.csv', options={'draw', 'csv'}).solvePinch()
pinch_tool/README.md ADDED
@@ -0,0 +1,90 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Pinch Tool
2
+ # Python EST Template
3
+
4
+ One of the first things after creating your repo is to provide some short information about the repo and the code it contains.
5
+ **Describe your project in one or two sentences here (and now)!!!**
6
+
7
+
8
+ # Table of contents
9
+ - [Python EST Template](#python-est-template)
10
+ - [Table of contents](#table-of-contents)
11
+ - [Description](#description)
12
+ - [Installation](#installation)
13
+ - [Virtual environments](#virtual-environments)
14
+ - [Creating a venv in the terminal:](#creating-a-venv-in-the-terminal)
15
+ - [Creating a venv in VS Code:](#creating-a-venv-in-vs-code)
16
+ - [Creating a venv in PyCharm:](#creating-a-venv-in-pycharm)
17
+ - [Installing pip packages (Best practise)](#installing-pip-packages-best-practise)
18
+ - [Usage](#usage)
19
+
20
+ ## Description
21
+ Here you are asked to provide a detailed project description about your repository.
22
+ This contains the purpose and structure of your project.
23
+
24
+ ## Installation
25
+ When building a larger project, that depends on multiple packages a detailed description on how to set up your environment might be necessary.
26
+ This is your job...
27
+
28
+
29
+ The following two sections give you a quick overview on best practises on "workspace management" for python coding...enjoy!
30
+
31
+ #### Virtual environments
32
+ When programming different projects over longer periods of time, it can happen that more and more packages are installed, some of which are no longer needed because the corresponding repos have long since been deleted.
33
+ This can become a problem when it comes to storage space on your device and will cause the size of your Python installation to grow rapidly.
34
+
35
+ To solve this problem [virtual environments](https://docs.python.org/3/library/venv.html) (`venv`) come into play.
36
+ These `venv` create a project related virtual python interpreter where the packages are installed, meaning if you delete the project (or more specific the `.venv` folder) all installed packages are deleted as well.
37
+
38
+ Therefore it is suggested to create a `.venv` for each project.
39
+
40
+ ##### Creating a venv in the terminal:
41
+
42
+ ***Note: due to Windows-specific quirks the following steps may not work in PowerShell. Use the classic Command Prompt ("Eingabeaufforderung") instead!***
43
+
44
+ 1. To create a virtual environment using the terminal simply navigate into the project folder or open the terminal directly in this location
45
+ 2. Run the following command to create a new virtual environment: `python -m venv .venv`
46
+ 3. Activate your environment for the workspace by executing the following command: `.venv\Scripts\activate`
47
+
48
+ You are now working in your virtual environment, congrats! :)
49
+
50
+
51
+ ##### Creating a venv in VS Code:
52
+ 1. Install the Python Environment Manager extension. After installation the python-symbol should occur in the sidebar on the left side.
53
+
54
+ 2. Press on the python icon in the sidebar, then click on the plus sign in your workspace environment section (top left) and select `Venv` in the opening pop-up.
55
+
56
+ 3. Use your general python installation as your interpreter, when asked.
57
+
58
+ 4. Now a new `venv` should be created and a workspace environment called `.venv (...)` should be displayed.
59
+
60
+ ##### Creating a venv in PyCharm:
61
+
62
+ **@Henning Rahlf** can you update the section for pycharm please?
63
+
64
+ #### Installing pip packages (Best practise)
65
+ [Pip](https://pip.pypa.io/en/stable/) is one of the most used package manager for python.
66
+ To organize your imported packages and make the required packages visible for other users a common way is to use a `requirements.txt` file.
67
+ This file is included in this template by default.
68
+
69
+ Whenever adding an import to your source code, you should also update your `requirements.txt` by simply add your package name to a new line.
70
+ ```
71
+ ## requirements.txt
72
+ matplotlib
73
+ numpy
74
+ pandas
75
+ ...
76
+ ```
77
+ If your code relies on a specific version of an imported package (and you want to maintain compatibility) you can also specify the version as shown below:
78
+ ```
79
+ numpy==1.21.2
80
+ ```
81
+ *Note: You can also use expressions like `>`, `<`, `>=`, `<=` to allow certain ranges of package versions.*
82
+
83
+ To install the required packages you can now simply run the following command:
84
+ ```
85
+ pip install -r requirements.txt
86
+ ```
87
+ instead of installing each package independently.
88
+
89
+ ## Usage
90
+ If required, add instructions on how to use the software here...
pinch_tool/TotalSiteProfile_main.py ADDED
@@ -0,0 +1,34 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import csv
2
+ from Modules.TotalSiteProfile.TotalSiteProfile import TotalSiteProfile as TSP
3
+ from Modules.TotalSiteProfile.TSPPlot import TSPPlot as TSPPlot
4
+
5
class TotalSiteProfilemain():
    """Driver that aggregates several process profiles into a Total Site
    Profile (TSP)."""

    def __init__(self, siteDesignation, CSVList, options=None):
        """Set up the TSP solver.

        Args:
            siteDesignation: Name/identifier of the site.
            CSVList: List of per-process profile CSV files to import.
            options: Optional option flags (e.g. 'draw', 'csv') forwarded
                to the TSP solver.
        """
        # Avoid the mutable-default-argument pitfall: the original used
        # `options={}`, a single dict shared across all instances.
        if options is None:
            options = {}
        self.CSVList = CSVList

        self.TotalSite = TSP(siteDesignation, options)

    def solveTotalSiteProfile(self, localisation='DE', internalHeatTransfer=True):
        """Import every profile, optionally remove pockets, split hot/cold
        streams, and build the total site profile.

        Args:
            localisation: Language/locale code used for plot labels.
            internalHeatTransfer: When True, temperature pockets are removed
                (heat recovered inside each process); otherwise the profiles
                are passed through unchanged.
        """
        for siteProfilecsv in self.CSVList:
            self.TotalSite.importData(siteProfilecsv)
            if internalHeatTransfer == True:
                self.TotalSite.deleteTemperaturePockets()
            else:
                self.TotalSite.noDeletionHelper()
            self.TotalSite.splitHotandCold()
        self.TotalSite.constructTotalSiteProfile(localisation)

        if self.TotalSite._options['draw'] == True:
            TSPPlot().showPlots()

    def testsolve(self):
        """Debug helper: import and pocket-delete every profile, then show
        the plots when drawing is enabled."""
        for siteProfilecsv in self.CSVList:
            self.TotalSite.importData(siteProfilecsv)
            self.TotalSite.deleteTemperaturePockets()

        if self.TotalSite._options['draw'] == True:
            TSPPlot().showPlots()
32
+
33
+
34
+ TotalSiteProfilemain('Test', ['Example.csv', 'Example.csv'], options={'draw', 'csv'}).solveTotalSiteProfile(internalHeatTransfer = False)
pinch_tool/pinch_analysis_notebook.ipynb ADDED
The diff for this file is too large to render. See raw diff
 
pinch_tool/requirements.txt ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ numpy
2
+ matplotlib
3
+ tabulate
4
+ CoolProp
pinch_tool/tests/.gitkeep ADDED
File without changes
src/graphics_utils.py ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from PIL import Image, ImageDraw
2
+
3
+
4
def draw_smooth_ellipse(base_img, bbox, fill=None, outline=None, width=1, scale=4):
    """Composite an anti-aliased ellipse onto `base_img`.

    The ellipse is rendered on a transparent overlay `scale` times larger
    than the base image and then shrunk back down with LANCZOS resampling
    (supersampling), which yields much smoother edges than drawing at the
    target resolution directly.

    Args:
        base_img (PIL.Image): RGBA image to draw onto. The input is not
            modified; a new alpha-composited image is returned.
        bbox (sequence): [x0, y0, x1, y1] bounding box in base-image
            coordinates.
        fill (tuple|str): Fill color (RGBA tuple or PIL color).
        outline (tuple|str): Outline color.
        width (int): Outline width in base-image pixels.
        scale (int): Supersampling factor; 3-6 is usually good. Default 4.

    Returns:
        PIL.Image: New RGBA image with the ellipse composited.
    """
    if base_img.mode != 'RGBA':
        base_img = base_img.convert('RGBA')

    base_w, base_h = base_img.size

    # Transparent high-resolution overlay to draw the ellipse on.
    hi_res = Image.new('RGBA', (base_w * scale, base_h * scale), (0, 0, 0, 0))
    pen = ImageDraw.Draw(hi_res)

    # Project the bounding box into overlay coordinates.
    scaled_bbox = [int(coord * scale) for coord in bbox]

    if fill:
        pen.ellipse(scaled_bbox, fill=fill)

    if outline and width and width > 0:
        pen.ellipse(scaled_bbox, outline=outline, width=max(1, int(width * scale)))

    # Shrink the overlay back to the base size (this is the anti-aliasing
    # step) and composite it over the base image.
    shrunk = hi_res.resize((base_w, base_h), Image.Resampling.LANCZOS)
    return Image.alpha_composite(base_img, shrunk)
src/home.py ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
"""Streamlit entry (home) page of the HeatTransPlan app.

Sets the global page configuration, injects CSS that shrinks the default
font sizes and the sidebar width, and renders the home-page title.
"""
import streamlit as st
import streamlit.components.v1 as components
import base64
# NOTE(review): `components` and `base64` are not used anywhere in this
# file - confirm other pages do not rely on side effects before removing.

# Configure the app - this must be the first Streamlit command
st.set_page_config(
    page_title="HeatTransPlan App",
    page_icon="",
    initial_sidebar_state="expanded"
)

# Apply styles immediately to prevent flash
# The CSS below shrinks the root font size, pins the sidebar to a narrow
# fixed width (collapsed sidebar takes no space), and compacts buttons,
# inputs and headings app-wide.
st.markdown(
    """
    <style>
    :root {
        font-size: 11px !important;
    }
    section[data-testid="stSidebar"][aria-expanded="true"] {
        width: 180px !important;
        min-width: 180px !important;
    }
    section[data-testid="stSidebar"][aria-expanded="false"] {
        width: 0 !important;
        min-width: 0 !important;
        margin-left: 0 !important;
    }

    /* Smaller fonts and elements - apply to all elements */
    html, body, .stApp, * {font-size:11px !important;}
    .stMarkdown p, .stMarkdown span, .stMarkdown li {font-size:11px !important;}
    .stButton button {font-size:10px !important; padding:0.1rem 0.3rem !important;}
    .stTextInput input, .stNumberInput input {font-size:10px !important; padding:0.1rem 0.2rem !important;}
    h1 {font-size: 1.5rem !important; margin-bottom: 0.3rem !important;}
    </style>
    """,
    unsafe_allow_html=True,
)


# Home page content
st.title("Home page")
43
+
src/pages/data_collection.py ADDED
The diff for this file is too large to render. See raw diff
 
src/pages/potential_analysis.py ADDED
The diff for this file is too large to render. See raw diff
 
src/pages_test/exploration.py ADDED
@@ -0,0 +1,351 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import pandas as pd
3
+ import numpy as np
4
+ import plotly.express as px
5
+ import plotly.graph_objects as go
6
+ import plotly.figure_factory as ff
7
+ from plotly.subplots import make_subplots
8
+ import matplotlib.pyplot as plt
9
+ import seaborn as sns
10
+ from io import BytesIO
11
+
12
+ st.set_page_config(
13
+ page_title="Data Exploration",
14
+ layout="wide",
15
+ initial_sidebar_state="expanded"
16
+ )
17
+
18
+ # Apply styles
19
+ st.markdown(
20
+ """
21
+ <style>
22
+ :root {
23
+ font-size: 11px !important;
24
+ }
25
+ section[data-testid="stSidebar"][aria-expanded="true"] {
26
+ width: 180px !important;
27
+ min-width: 180px !important;
28
+ }
29
+ section[data-testid="stSidebar"][aria-expanded="false"] {
30
+ width: 0 !important;
31
+ min-width: 0 !important;
32
+ margin-left: 0 !important;
33
+ }
34
+ html, body, .stApp, * {font-size:11px !important;}
35
+ .stMarkdown p, .stMarkdown span, .stMarkdown li {font-size:11px !important;}
36
+ .stButton button {font-size:10px !important; padding:0.1rem 0.3rem !important;}
37
+ .stTextInput input, .stNumberInput input {font-size:10px !important; padding:0.1rem 0.2rem !important;}
38
+ h1 {font-size: 1.5rem !important; margin-bottom: 0.3rem !important;}
39
+ h2 {font-size: 1.2rem !important; margin-bottom: 0.2rem !important;}
40
+ h3 {font-size: 1rem !important; margin-bottom: 0.2rem !important;}
41
+ </style>
42
+ """,
43
+ unsafe_allow_html=True,
44
+ )
45
+
46
+ st.title("Data Exploration")
47
+
48
+ st.markdown("Upload a dataset to explore and analyze your data.")
49
+
50
+ uploaded_file = st.file_uploader("Upload your dataset (CSV)", type=["csv"])
51
+
52
+ if uploaded_file is not None:
53
+ try:
54
+ # Read the CSV file
55
+ df = pd.read_csv(uploaded_file)
56
+
57
+ # Store original dataframe in session state
58
+ if 'original_df' not in st.session_state:
59
+ st.session_state['original_df'] = df.copy()
60
+
61
+ # ============================================================
62
+ # SECTION 1: Datetime Column Selection
63
+ # ============================================================
64
+ with st.expander("📅 1. Datetime Column Selection", expanded=True):
65
+ all_columns = ["None"] + list(df.columns)
66
+ datetime_col = st.selectbox(
67
+ "Select the datetime column (if any):",
68
+ options=all_columns,
69
+ index=0,
70
+ help="If your dataset has a datetime column, select it here to enable time series analysis."
71
+ )
72
+
73
+ # Parse datetime if selected
74
+ if datetime_col != "None":
75
+ try:
76
+ df[datetime_col] = pd.to_datetime(df[datetime_col])
77
+ st.success(f"✅ Column '{datetime_col}' successfully parsed as datetime.")
78
+ except Exception as e:
79
+ st.warning(f"⚠️ Could not parse '{datetime_col}' as datetime: {e}")
80
+ datetime_col = "None"
81
+
82
+ # ============================================================
83
+ # SECTION 2: Data Preview
84
+ # ============================================================
85
+ with st.expander("👁️ 2. Data Preview", expanded=True):
86
+ col1, col2, col3 = st.columns(3)
87
+ with col1:
88
+ st.metric("Rows", df.shape[0])
89
+ with col2:
90
+ st.metric("Columns", df.shape[1])
91
+ with col3:
92
+ st.metric("Memory Usage", f"{df.memory_usage(deep=True).sum() / 1024:.2f} KB")
93
+
94
+ st.dataframe(df.head(20), use_container_width=True)
95
+
96
+ # ============================================================
97
+ # SECTION 3: Variable Summary
98
+ # ============================================================
99
+ with st.expander("📊 3. Variable Summary", expanded=True):
100
+ # Classify columns
101
+ summary_data = []
102
+ numerical_cols = []
103
+ categorical_cols = []
104
+ datetime_cols_list = []
105
+
106
+ for col in df.columns:
107
+ col_dtype = df[col].dtype
108
+ missing_count = df[col].isna().sum()
109
+ missing_pct = (missing_count / len(df)) * 100
110
+ unique_count = df[col].nunique()
111
+
112
+ # Determine variable type
113
+ if pd.api.types.is_datetime64_any_dtype(df[col]):
114
+ var_type = "Datetime"
115
+ datetime_cols_list.append(col)
116
+ elif pd.api.types.is_numeric_dtype(df[col]):
117
+ var_type = "Numerical"
118
+ numerical_cols.append(col)
119
+ else:
120
+ # Try to convert to numeric
121
+ try:
122
+ df[col] = pd.to_numeric(df[col], errors='raise')
123
+ var_type = "Numerical (converted)"
124
+ numerical_cols.append(col)
125
+ except:
126
+ # Check if it could be datetime
127
+ if col != datetime_col:
128
+ try:
129
+ pd.to_datetime(df[col].dropna().head(10))
130
+ var_type = "Potential Datetime"
131
+ except:
132
+ var_type = "Categorical"
133
+ categorical_cols.append(col)
134
+ else:
135
+ var_type = "Categorical"
136
+ categorical_cols.append(col)
137
+
138
+ summary_data.append({
139
+ "Column": col,
140
+ "Type": var_type,
141
+ "Dtype": str(col_dtype),
142
+ "Missing": missing_count,
143
+ "Missing %": f"{missing_pct:.2f}%",
144
+ "Unique": unique_count,
145
+ "Sample": str(df[col].dropna().iloc[0]) if len(df[col].dropna()) > 0 else "N/A"
146
+ })
147
+
148
+ summary_df = pd.DataFrame(summary_data)
149
+ st.dataframe(summary_df, use_container_width=True)
150
+
151
+ # Missing values visualization
152
+ st.markdown("#### Missing Values")
153
+ missing_data = df.isnull().sum()
154
+ missing_data = missing_data[missing_data > 0]
155
+
156
+ if len(missing_data) > 0:
157
+ fig_missing = px.bar(
158
+ x=missing_data.index,
159
+ y=missing_data.values,
160
+ labels={'x': 'Column', 'y': 'Missing Count'},
161
+ title='Missing Values per Column',
162
+ color=missing_data.values,
163
+ color_continuous_scale='Reds'
164
+ )
165
+ fig_missing.update_layout(height=400)
166
+ st.plotly_chart(fig_missing, use_container_width=True)
167
+ else:
168
+ st.success("✅ No missing values in the dataset!")
169
+
170
+ # Filter numerical columns (exclude datetime) - do this once for all sections
171
+ num_cols_for_dist = [c for c in numerical_cols if c != datetime_col]
172
+
173
+ # ============================================================
174
+ # SECTION 4: Distributions
175
+ # ============================================================
176
+ with st.expander("📈 4. Variable Distributions", expanded=False):
177
+ if num_cols_for_dist:
178
+ st.markdown("#### Numerical Variables")
179
+
180
+ # Create distribution plots in a grid
181
+ n_cols = min(3, len(num_cols_for_dist))
182
+ n_rows = (len(num_cols_for_dist) + n_cols - 1) // n_cols
183
+
184
+ for row_idx in range(n_rows):
185
+ cols = st.columns(n_cols)
186
+ for col_idx in range(n_cols):
187
+ var_idx = row_idx * n_cols + col_idx
188
+ if var_idx < len(num_cols_for_dist):
189
+ var_name = num_cols_for_dist[var_idx]
190
+ with cols[col_idx]:
191
+ fig = px.histogram(
192
+ df,
193
+ x=var_name,
194
+ title=f'{var_name}',
195
+ marginal="box",
196
+ nbins=30
197
+ )
198
+ fig.update_layout(
199
+ height=300,
200
+ showlegend=False,
201
+ margin=dict(l=20, r=20, t=40, b=20)
202
+ )
203
+ st.plotly_chart(fig, use_container_width=True)
204
+
205
+ if categorical_cols:
206
+ st.markdown("#### Categorical Variables")
207
+
208
+ # Limit to top 10 categories for each variable
209
+ for cat_col in categorical_cols[:5]: # Limit to first 5 categorical columns
210
+ value_counts = df[cat_col].value_counts().head(10)
211
+ fig = px.bar(
212
+ x=value_counts.index.astype(str),
213
+ y=value_counts.values,
214
+ labels={'x': cat_col, 'y': 'Count'},
215
+ title=f'Distribution of {cat_col} (Top 10)'
216
+ )
217
+ fig.update_layout(height=300)
218
+ st.plotly_chart(fig, use_container_width=True)
219
+
220
+ # ============================================================
221
+ # SECTION 5: Time Series Visualization
222
+ # ============================================================
223
+ with st.expander("⏱️ 5. Time Series Visualization", expanded=False):
224
+ if datetime_col != "None":
225
+ # Sort by datetime
226
+ df_sorted = df.sort_values(by=datetime_col)
227
+ n_points = len(df_sorted)
228
+
229
+ st.info(f"Dataset has {n_points} data points. {'Using Plotly (interactive)' if n_points < 4000 else 'Using Seaborn (static) for performance'}.")
230
+
231
+ # Select variables to plot
232
+ ts_vars = st.multiselect(
233
+ "Select variables to visualize as time series:",
234
+ options=num_cols_for_dist,
235
+ default=num_cols_for_dist[:min(3, len(num_cols_for_dist))]
236
+ )
237
+
238
+ if ts_vars:
239
+ if n_points < 4000:
240
+ # Use Plotly for interactive visualization
241
+ for var in ts_vars:
242
+ fig = px.line(
243
+ df_sorted,
244
+ x=datetime_col,
245
+ y=var,
246
+ title=f'{var} over Time'
247
+ )
248
+ fig.update_layout(
249
+ height=350,
250
+ xaxis_title="Time",
251
+ yaxis_title=var,
252
+ hovermode='x unified'
253
+ )
254
+ st.plotly_chart(fig, use_container_width=True)
255
+ else:
256
+ # Use Seaborn/Matplotlib for large datasets
257
+ for var in ts_vars:
258
+ fig, ax = plt.subplots(figsize=(12, 4))
259
+ ax.plot(df_sorted[datetime_col], df_sorted[var], linewidth=0.5)
260
+ ax.set_xlabel("Time")
261
+ ax.set_ylabel(var)
262
+ ax.set_title(f'{var} over Time')
263
+ plt.xticks(rotation=45)
264
+ plt.tight_layout()
265
+ st.pyplot(fig)
266
+ plt.close()
267
+
268
+ st.caption("Using static plots for better performance with large datasets (>4000 points).")
269
+ else:
270
+ st.info("Select a datetime column in Section 1 to enable time series visualization.")
271
+
272
+ # ============================================================
273
+ # SECTION 6: Correlation Matrix
274
+ # ============================================================
275
+ with st.expander("🔗 6. Correlation Matrix", expanded=False):
276
+ if len(num_cols_for_dist) > 1:
277
+ # Calculate correlation matrix using only numeric data
278
+ numeric_df = df[num_cols_for_dist].select_dtypes(include=[np.number])
279
+ corr_matrix = numeric_df.corr()
280
+
281
+ # Get column names as lists
282
+ x_labels = corr_matrix.columns.tolist()
283
+ y_labels = corr_matrix.index.tolist()
284
+
285
+ # Get correlation values as a 2D list
286
+ z_values = corr_matrix.values.tolist()
287
+
288
+ # Create annotation text
289
+ z_text = [[f'{val:.2f}' for val in row] for row in z_values]
290
+
291
+ # Create heatmap using go.Heatmap for more control
292
+ fig_corr = go.Figure(data=go.Heatmap(
293
+ z=z_values,
294
+ x=x_labels,
295
+ y=y_labels,
296
+ colorscale='RdBu_r',
297
+ zmin=-1,
298
+ zmax=1,
299
+ text=z_text,
300
+ texttemplate='%{text}',
301
+ textfont={"size": 10},
302
+ hovertemplate='%{x} vs %{y}: %{z:.3f}<extra></extra>'
303
+ ))
304
+
305
+ fig_corr.update_layout(
306
+ title='Correlation Matrix',
307
+ height=max(400, len(x_labels) * 40),
308
+ width=max(600, len(x_labels) * 50),
309
+ xaxis=dict(tickangle=45),
310
+ yaxis=dict(autorange='reversed')
311
+ )
312
+
313
+ st.plotly_chart(fig_corr, use_container_width=True)
314
+
315
+ # Show highly correlated pairs
316
+ st.markdown("#### Highly Correlated Pairs (|r| > 0.7)")
317
+ high_corr = []
318
+ for i in range(len(corr_matrix.columns)):
319
+ for j in range(i+1, len(corr_matrix.columns)):
320
+ corr_val = corr_matrix.iloc[i, j]
321
+ if not np.isnan(corr_val) and abs(corr_val) > 0.7:
322
+ high_corr.append({
323
+ "Variable 1": corr_matrix.columns[i],
324
+ "Variable 2": corr_matrix.columns[j],
325
+ "Correlation": f"{corr_val:.3f}"
326
+ })
327
+
328
+ if high_corr:
329
+ st.dataframe(pd.DataFrame(high_corr), use_container_width=True)
330
+ else:
331
+ st.info("No highly correlated pairs found (|r| > 0.7)")
332
+ else:
333
+ st.warning("Need at least 2 numerical columns for correlation analysis.")
334
+
335
+ # ============================================================
336
+ # SECTION 7: Statistical Summary
337
+ # ============================================================
338
+ with st.expander("📋 7. Statistical Summary", expanded=False):
339
+ if num_cols_for_dist:
340
+ numeric_df = df[num_cols_for_dist].select_dtypes(include=[np.number])
341
+ stats_df = numeric_df.describe().T
342
+ stats_df['skewness'] = numeric_df.skew()
343
+ stats_df['kurtosis'] = numeric_df.kurtosis()
344
+ st.dataframe(stats_df.round(3), use_container_width=True)
345
+ else:
346
+ st.warning("No numerical columns available for statistical summary.")
347
+
348
+ except Exception as e:
349
+ st.error(f"Error loading file: {e}")
350
+ import traceback
351
+ st.code(traceback.format_exc())
src/pages_test/home_testing.py ADDED
@@ -0,0 +1,56 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
"""Standalone test version of the HeatTransPlan home page.

Configures the Streamlit page, injects compact global CSS, shows the
project logo, a clickable project image, and a link to the data-collection
page.
"""
import streamlit as st
import streamlit.components.v1 as components  # NOTE(review): unused in this file — confirm before removing
import base64

# Configure the app - this must be the first Streamlit command
st.set_page_config(
    page_title="HeatTransPlan App",
    page_icon="",
    initial_sidebar_state="expanded"
)

# Apply styles immediately to prevent flash
# (global 11px font, fixed 180px sidebar width; injected as raw CSS)
st.markdown(
    """
    <style>
    :root {
        font-size: 11px !important;
    }
    section[data-testid="stSidebar"][aria-expanded="true"] {
        width: 180px !important;
        min-width: 180px !important;
    }
    section[data-testid="stSidebar"][aria-expanded="false"] {
        width: 0 !important;
        min-width: 0 !important;
        margin-left: 0 !important;
    }

    /* Smaller fonts and elements - apply to all elements */
    html, body, .stApp, * {font-size:11px !important;}
    .stMarkdown p, .stMarkdown span, .stMarkdown li {font-size:11px !important;}
    .stButton button {font-size:10px !important; padding:0.1rem 0.3rem !important;}
    .stTextInput input, .stNumberInput input {font-size:10px !important; padding:0.1rem 0.2rem !important;}
    h1 {font-size: 1.5rem !important; margin-bottom: 0.3rem !important;}
    </style>
    """,
    unsafe_allow_html=True,
)

# Display the logo
# NOTE(review): path is relative to the working directory, not to this file —
# confirm it resolves when launched via `streamlit run` from the project root.
st.image("../data/symbol.svg", width=200)
# Home page content
st.title("Home page")


st.subheader("Information")

# Display a clickable image from the HeatTransPlan website
# The image is inlined as base64 so the <a> wrapper can make it clickable.
with open("../data/image_project.jpeg", "rb") as f:
    image_data = f.read()
encoded_image = base64.b64encode(image_data).decode()
st.markdown(f'<a href="https://www.heattransplan.de/" target="_blank"><img src="data:image/jpeg;base64,{encoded_image}" width="400" style="transition: transform 0.3s ease; border: none;" onmouseover="this.style.transform=\'scale(1.1)\'" onmouseout="this.style.transform=\'scale(1)\'"></a>', unsafe_allow_html=True)
st.markdown("About HeatTransPlan")

st.subheader("Navigate to Pages")
st.page_link("pages/data_collection.py", label="📊 Energy Data Collection", help="Collect and manage energy data")
src/potential_analysis_map.py ADDED
@@ -0,0 +1,1165 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import sys
3
+ import os
4
+ import plotly.graph_objects as go
5
+ import matplotlib.pyplot as plt
6
+ import tempfile
7
+ import csv
8
+ from io import BytesIO
9
+ from PIL import Image, ImageDraw, ImageFont
10
+ from graphics_utils import draw_smooth_ellipse
11
+ import math
12
+
13
+ # Add the pinch_tool directory to the path for imports
14
+ pinch_tool_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'pinch_tool'))
15
+ if pinch_tool_path not in sys.path:
16
+ sys.path.insert(0, pinch_tool_path)
17
+
18
+ # Import pinch analysis modules
19
+ try:
20
+ from Modules.Pinch.Pinch import Pinch
21
+ PINCH_AVAILABLE = True
22
+ PINCH_IMPORT_ERROR = None
23
+ except ImportError as e:
24
+ PINCH_AVAILABLE = False
25
+ PINCH_IMPORT_ERROR = str(e)
26
+
27
# Configure this Streamlit page; must run before any other Streamlit call.
st.set_page_config(
    page_title="Potential Analysis",
    initial_sidebar_state="collapsed",
    layout="wide"
)
32
+
33
+ # Helper function to convert lon/lat to pixel coordinates on snapshot
34
def snapshot_lonlat_to_pixel(lon_val_in, lat_val_in, center_ll, z_level, img_w, img_h):
    """Map a WGS84 lon/lat point to pixel coordinates on a map snapshot.

    The snapshot of size ``img_w`` x ``img_h`` is assumed to be centered on
    ``center_ll`` — a ``(lon, lat)`` pair — at slippy-map zoom ``z_level``
    with the standard 256 px tiles. The returned ``(x, y)`` is measured in
    pixels from the image's top-left corner.
    """
    def _tile_xy(lon_deg, lat_deg, zoom):
        # Standard Web-Mercator lon/lat -> fractional tile coordinates.
        lat_rad = math.radians(lat_deg)
        n_val = 2.0 ** zoom
        tx = (lon_deg + 180.0) / 360.0 * n_val
        ty = (1.0 - math.log(math.tan(lat_rad) + 1 / math.cos(lat_rad)) / math.pi) / 2.0 * n_val
        return tx, ty

    px_per_tile = 256
    center_x, center_y = _tile_xy(center_ll[0], center_ll[1], z_level)
    point_x, point_y = _tile_xy(lon_val_in, lat_val_in, z_level)

    # Offset from the image center, converted from tile units to pixels.
    return (
        img_w / 2 + (point_x - center_x) * px_per_tile,
        img_h / 2 + (point_y - center_y) * px_per_tile,
    )
50
+
51
# Apply styles immediately to prevent flash
# NOTE(review): this CSS is duplicated across several pages (e.g. the home
# page) — consider centralizing it in a shared helper.
st.markdown(
    """
    <style>
    :root {
        font-size: 11px !important;
    }
    section[data-testid="stSidebar"][aria-expanded="true"] {
        width: 180px !important;
        min-width: 180px !important;
    }
    section[data-testid="stSidebar"][aria-expanded="false"] {
        width: 0 !important;
        min-width: 0 !important;
        margin-left: 0 !important;
    }

    /* Smaller fonts and elements - apply to all elements */
    html, body, .stApp, * {font-size:11px !important;}
    .stMarkdown p, .stMarkdown span, .stMarkdown li {font-size:11px !important; margin:0 !important; padding:0 !important;}
    .stButton button {font-size:10px !important; padding:0.1rem 0.3rem !important;}
    .stTextInput input, .stNumberInput input {font-size:10px !important; padding:0.1rem 0.2rem !important;}
    h1 {font-size: 1.5rem !important; margin-bottom: 0.3rem !important;}
    /* Compact layout */
    .block-container {padding-top: 1rem !important; padding-bottom: 0 !important;}
    div[data-testid="stVerticalBlock"] > div {padding: 0 !important; margin: 0 !important;}
    hr {margin: 0.3rem 0 !important;}
    .stCheckbox {margin: 0 !important; padding: 0 !important;}
    div[data-testid="stHorizontalBlock"] {gap: 0.2rem !important;}
    </style>
    """,
    unsafe_allow_html=True,
)

st.title("Potential Analysis")
86
+
87
+ # =====================================================
88
+ # HELPER FUNCTION: Generate mini-map with kW circles for each STREAM
89
+ # =====================================================
90
def generate_stream_kw_minimap(processes, map_snapshot, map_center, map_zoom, max_width=500, max_height=400):
    """
    Generate a mini-map image showing each stream as a circle sized by kW.
    Streams are positioned near their parent subprocess location.
    Returns a PIL Image or None if no snapshot available.

    Args:
        processes: list of subprocess dicts; each may carry 'lat'/'lon',
            'name' and a 'streams' list. Stream dicts use either the
            'properties'/'values' key-pair structure or the legacy flat
            fields 'temp_in'/'temp_out'/'mdot'/'cp'.
        map_snapshot: raw image bytes of the locked map snapshot; any falsy
            value short-circuits to None.
        map_center: [lat, lon] of the snapshot center — note it is swapped
            to (lon, lat) before being handed to snapshot_lonlat_to_pixel.
        map_zoom: slippy-map zoom level the snapshot was taken at.
        max_width, max_height: bounding box (pixels) the snapshot is scaled
            into, preserving aspect ratio.

    Returns:
        A PIL.Image.Image with circle/label overlays and a legend, or None
        when there is no snapshot or any error occurs while drawing.
    """
    if not map_snapshot:
        return None

    try:
        # Load the base map snapshot
        base_img = Image.open(BytesIO(map_snapshot)).convert("RGBA")
        orig_w, orig_h = base_img.size

        # Calculate scale to fit within max dimensions while maintaining aspect ratio
        scale = min(max_width / orig_w, max_height / orig_h)
        new_w = int(orig_w * scale)
        new_h = int(orig_h * scale)

        # Resize the base image
        base_img = base_img.resize((new_w, new_h), Image.Resampling.LANCZOS)

        # Create drawing context
        draw = ImageDraw.Draw(base_img)

        # Try to load a font (macOS system paths first, then PIL's built-in
        # bitmap font as last resort).
        try:
            font = ImageFont.truetype("/System/Library/Fonts/Arial.ttf", 11)
            font_small = ImageFont.truetype("/System/Library/Fonts/Arial.ttf", 9)
        except (OSError, IOError):
            try:
                font = ImageFont.truetype("/System/Library/Fonts/Helvetica.ttc", 11)
                font_small = ImageFont.truetype("/System/Library/Fonts/Helvetica.ttc", 9)
            except (OSError, IOError):
                font = ImageFont.load_default()
                font_small = font

        # Collect all streams with their kW values and positions
        all_streams = []

        for proc_idx, subprocess in enumerate(processes):
            sub_lat = subprocess.get('lat')
            sub_lon = subprocess.get('lon')
            subprocess_name = subprocess.get('name', f'Subprocess {proc_idx + 1}')

            streams = subprocess.get('streams', [])

            for s_idx, stream in enumerate(streams):
                stream_name = stream.get('name', f'Stream {s_idx + 1}')

                # Extract stream data from the 'properties'/'values'
                # key-pair structure (propN -> valN).
                props = stream.get('properties', {})
                vals = stream.get('values', {})

                tin = None
                tout = None
                mdot = None
                cp_val = None

                if isinstance(props, dict) and isinstance(vals, dict):
                    for pk, pname in props.items():
                        # Property key 'propN' maps to value key 'valN'.
                        vk = pk.replace('prop', 'val')
                        v = vals.get(vk, '')

                        if pname == 'Tin' and v:
                            try:
                                tin = float(v)
                            except (ValueError, TypeError):
                                pass
                        elif pname == 'Tout' and v:
                            try:
                                tout = float(v)
                            except (ValueError, TypeError):
                                pass
                        elif pname == 'ṁ' and v:
                            try:
                                mdot = float(v)
                            except (ValueError, TypeError):
                                pass
                        elif pname == 'cp' and v:
                            try:
                                cp_val = float(v)
                            except (ValueError, TypeError):
                                pass

                # Fallback to legacy fields
                if tin is None and stream.get('temp_in'):
                    try:
                        tin = float(stream['temp_in'])
                    except (ValueError, TypeError):
                        pass
                if tout is None and stream.get('temp_out'):
                    try:
                        tout = float(stream['temp_out'])
                    except (ValueError, TypeError):
                        pass
                if mdot is None and stream.get('mdot'):
                    try:
                        mdot = float(stream['mdot'])
                    except (ValueError, TypeError):
                        pass
                if cp_val is None and stream.get('cp'):
                    try:
                        cp_val = float(stream['cp'])
                    except (ValueError, TypeError):
                        pass

                # Calculate kW = mdot * cp * |ΔT|
                stream_kw = 0.0
                is_hot = None
                if tin is not None and tout is not None and mdot is not None and cp_val is not None:
                    delta_t = abs(tin - tout)
                    stream_kw = mdot * cp_val * delta_t
                    is_hot = tin > tout  # True = HOT (cooling), False = COLD (heating)

                all_streams.append({
                    'proc_idx': proc_idx,
                    'stream_idx': s_idx,
                    'subprocess_name': subprocess_name,
                    'stream_name': stream_name,
                    'lat': sub_lat,
                    'lon': sub_lon,
                    'kw': stream_kw,
                    'is_hot': is_hot,
                    'tin': tin,
                    'tout': tout
                })

        # Find max kW for scaling circle sizes (guard against empty/zero so
        # the division below is always safe).
        kw_values = [s['kw'] for s in all_streams if s['kw'] > 0]
        max_kw = max(kw_values) if kw_values else 1.0
        if max_kw == 0:
            max_kw = 1.0

        # Group streams by subprocess for positioning
        subprocess_streams = {}
        for s in all_streams:
            key = s['proc_idx']
            if key not in subprocess_streams:
                subprocess_streams[key] = []
            subprocess_streams[key].append(s)

        # Draw circles for each stream
        for proc_idx, streams_list in subprocess_streams.items():
            if not streams_list:
                continue

            # Get subprocess position (all streams of a subprocess share it)
            first_stream = streams_list[0]
            lat = first_stream['lat']
            lon = first_stream['lon']

            if lat is None or lon is None:
                continue

            try:
                lat_f = float(lat)
                lon_f = float(lon)

                # Convert to pixel coordinates (on original size, then scale).
                # map_center is [lat, lon] and is swapped to (lon, lat) here.
                base_px, base_py = snapshot_lonlat_to_pixel(
                    lon_f, lat_f,
                    (map_center[1], map_center[0]),
                    map_zoom,
                    orig_w, orig_h
                )

                # Scale to new dimensions
                base_px = base_px * scale
                base_py = base_py * scale

                # Skip if outside bounds (50 px margin keeps partially
                # visible markers)
                if base_px < -50 or base_py < -50 or base_px > new_w + 50 or base_py > new_h + 50:
                    continue

                # Draw subprocess name first (measure text to center it)
                subprocess_name = first_stream['subprocess_name']
                if font:
                    bbox = draw.textbbox((0, 0), subprocess_name, font=font)
                    tw = bbox[2] - bbox[0]
                    th = bbox[3] - bbox[1]
                else:
                    tw = len(subprocess_name) * 6
                    th = 10

                # Draw subprocess label above streams
                label_x = int(base_px - tw / 2)
                label_y = int(base_py - 50)
                draw.rectangle([label_x - 3, label_y - 2, label_x + tw + 3, label_y + th + 2],
                               fill=(255, 255, 255, 230), outline=(100, 100, 100, 200))
                draw.text((label_x, label_y), subprocess_name, fill=(0, 0, 0, 255), font=font)

                # Position streams in a row below the label
                n_streams = len(streams_list)
                stream_spacing = 45  # pixels between stream circles
                start_x = base_px - (n_streams - 1) * stream_spacing / 2

                for i, stream_data in enumerate(streams_list):
                    px = start_x + i * stream_spacing
                    py = base_py

                    kw = stream_data['kw']
                    is_hot = stream_data['is_hot']

                    # Calculate circle radius based on kW (min 12, max 35 pixels)
                    if kw > 0:
                        radius = 12 + (kw / max_kw) * 23
                    else:
                        radius = 10

                    # Determine color based on hot/cold
                    if is_hot is True:
                        fill_color = (255, 80, 80, 220)  # Red for HOT
                        border_color = (180, 30, 30, 255)
                    elif is_hot is False:
                        fill_color = (80, 140, 255, 220)  # Blue for COLD
                        border_color = (30, 80, 180, 255)
                    else:
                        fill_color = (180, 180, 180, 180)  # Gray for unknown
                        border_color = (120, 120, 120, 220)

                    # Draw circle
                    x0 = int(px - radius)
                    y0 = int(py - radius)
                    x1 = int(px + radius)
                    y1 = int(py + radius)

                    # draw_smooth_ellipse returns a new image, so the draw
                    # context must be recreated after each call.
                    base_img = draw_smooth_ellipse(base_img, [x0, y0, x1, y1], fill=fill_color, outline=border_color, width=2)
                    draw = ImageDraw.Draw(base_img)

                    # Draw kW label inside circle
                    if kw > 0:
                        kw_text = f"{kw:.0f}"
                        if font_small:
                            bbox = draw.textbbox((0, 0), kw_text, font=font_small)
                            text_w = bbox[2] - bbox[0]
                            text_h = bbox[3] - bbox[1]
                        else:
                            text_w = len(kw_text) * 5
                            text_h = 8

                        tx = int(px - text_w / 2)
                        ty = int(py - text_h / 2)

                        # White text for visibility
                        draw.text((tx, ty), kw_text, fill=(255, 255, 255, 255), font=font_small)

                    # Draw stream name below circle
                    stream_name = stream_data['stream_name']
                    if font_small:
                        bbox = draw.textbbox((0, 0), stream_name, font=font_small)
                        name_w = bbox[2] - bbox[0]
                        name_h = bbox[3] - bbox[1]
                    else:
                        name_w = len(stream_name) * 5
                        name_h = 8

                    name_x = int(px - name_w / 2)
                    name_y = int(py + radius + 4)

                    draw.rectangle([name_x - 2, name_y - 1, name_x + name_w + 2, name_y + name_h + 1],
                                   fill=(255, 255, 255, 220))
                    draw.text((name_x, name_y), stream_name, fill=(0, 0, 0, 255), font=font_small)

            except (ValueError, TypeError):
                # Unparseable lat/lon — skip this subprocess's markers.
                continue

        # Add legend in top-left corner
        legend_x = 10
        legend_y = 10
        legend_w = 70
        legend_h = 55

        # Legend background
        draw.rectangle([legend_x, legend_y, legend_x + legend_w, legend_y + legend_h],
                       fill=(255, 255, 255, 240), outline=(150, 150, 150, 200))

        # Legend title
        draw.text((legend_x + 5, legend_y + 3), "kW", fill=(0, 0, 0, 255), font=font)

        # Hot indicator
        base_img = draw_smooth_ellipse(base_img, [legend_x + 5, legend_y + 20, legend_x + 17, legend_y + 32],
                                       fill=(255, 80, 80, 220), outline=(180, 30, 30, 255), width=1)
        draw = ImageDraw.Draw(base_img)
        draw.text((legend_x + 22, legend_y + 21), "Hot", fill=(0, 0, 0, 255), font=font_small)

        # Cold indicator
        base_img = draw_smooth_ellipse(base_img, [legend_x + 5, legend_y + 37, legend_x + 17, legend_y + 49],
                                       fill=(80, 140, 255, 220), outline=(30, 80, 180, 255), width=1)
        draw = ImageDraw.Draw(base_img)
        draw.text((legend_x + 22, legend_y + 38), "Cold", fill=(0, 0, 0, 255), font=font_small)

        return base_img

    except Exception as e:
        # Best-effort rendering: any failure degrades to "no mini-map".
        # NOTE(review): the exception is silently discarded — consider
        # logging it to ease debugging.
        return None
386
+
387
# Initialize session state for selections if not exists
# (maps "stream_{proc_idx}_{stream_idx}" keys to boolean selected flags)
if 'selected_items' not in st.session_state:
    st.session_state['selected_items'] = {}

# Get processes from session state
# (populated by the Data Collection page; empty list on first visit)
processes = st.session_state.get('processes', [])

if not processes:
    st.info("No processes found. Please add processes in the Data Collection page first.")
396
+ else:
397
+ # Helper function to determine stream type and extract data
398
def get_stream_info(stream):
    """Extract Tin, Tout, mdot, cp from stream and determine if HOT or COLD.

    Values are read from the stream's 'properties'/'values' key-pair
    structure ('propN' maps to 'valN'); any value still missing afterwards
    falls back to the legacy flat fields 'temp_in'/'temp_out'/'mdot'/'cp'.

    Args:
        stream: stream dict as stored in session state.

    Returns:
        Dict with keys 'tin', 'tout', 'mdot', 'cp' (parsed floats or None),
        'CP' (mdot * cp heat-capacity flow, or None), 'kW'
        (mdot * cp * |Tin - Tout|, or None), and 'type' ("HOT" when
        Tin > Tout, "COLD" otherwise, None if either temperature is missing).
    """
    def _to_float(raw):
        # Parse a raw value to float; falsy ('' / None / 0) or unparseable
        # values yield None, mirroring the original truthiness-gated parsing.
        if not raw:
            return None
        try:
            return float(raw)
        except (ValueError, TypeError):
            return None

    properties = stream.get('properties', {})
    values = stream.get('values', {})

    tin = None
    tout = None
    mdot = None
    cp_val = None

    # Structured form: each 'propN' entry names a quantity whose value
    # lives under the matching 'valN' key. The last successfully parsed
    # occurrence of a name wins, as before.
    if isinstance(properties, dict) and isinstance(values, dict):
        parsed = {}
        for prop_key, prop_name in properties.items():
            value_key = prop_key.replace('prop', 'val')
            candidate = _to_float(values.get(value_key, ''))
            if candidate is not None:
                parsed[prop_name] = candidate
        tin = parsed.get('Tin')
        tout = parsed.get('Tout')
        mdot = parsed.get('ṁ')
        cp_val = parsed.get('cp')

    # Fallback to legacy flat fields for anything still missing.
    if tin is None:
        tin = _to_float(stream.get('temp_in'))
    if tout is None:
        tout = _to_float(stream.get('temp_out'))
    if mdot is None:
        mdot = _to_float(stream.get('mdot'))
    if cp_val is None:
        cp_val = _to_float(stream.get('cp'))

    # Determine stream type: HOT streams cool down (Tin > Tout).
    stream_type = None
    if tin is not None and tout is not None:
        stream_type = "HOT" if tin > tout else "COLD"

    # Heat-capacity flow rate CP = mdot * cp.
    cp_flow = None
    if mdot is not None and cp_val is not None:
        cp_flow = mdot * cp_val

    # Duty kW = mdot * cp * |ΔT|.
    kw = None
    if tin is not None and tout is not None and mdot is not None and cp_val is not None:
        kw = mdot * cp_val * abs(tin - tout)

    return {
        'tin': tin,
        'tout': tout,
        'mdot': mdot,
        'cp': cp_val,
        'CP': cp_flow,
        'kW': kw,
        'type': stream_type
    }
484
+
485
+ # =====================================================
486
+ # TWO-COLUMN LAYOUT: Stream Selection (left) + Map (right)
487
+ # =====================================================
488
+ stream_col, map_col = st.columns([1, 1.2])
489
+
490
+ with stream_col:
491
+ st.markdown("**Select streams for analysis:**")
492
+
493
+ # Display each process and its streams
494
+ for idx, process in enumerate(processes):
495
+ process_name = process.get('name', f'Subprocess {idx + 1}')
496
+
497
+ # Only show process header if it has streams
498
+ streams = process.get('streams', [])
499
+ if streams:
500
+ st.markdown(f"**{process_name}**")
501
+
502
+ for stream_idx, stream in enumerate(streams):
503
+ stream_key = f"stream_{idx}_{stream_idx}"
504
+ if stream_key not in st.session_state['selected_items']:
505
+ st.session_state['selected_items'][stream_key] = False
506
+
507
+ stream_cols = st.columns([0.05, 0.20, 0.75])
508
+ stream_selected = stream_cols[0].checkbox(
509
+ "S",
510
+ key=f"cb_{stream_key}",
511
+ value=st.session_state['selected_items'][stream_key],
512
+ label_visibility="collapsed"
513
+ )
514
+ st.session_state['selected_items'][stream_key] = stream_selected
515
+
516
+ # Display stream name
517
+ stream_name = stream.get('name', f'Stream {stream_idx + 1}')
518
+ stream_cols[1].write(stream_name)
519
+
520
+ # Get stream info and display type + key values
521
+ info = get_stream_info(stream)
522
+
523
+ display_parts = []
524
+ if info['tin'] is not None:
525
+ display_parts.append(f"Tin:{info['tin']}°C")
526
+ if info['tout'] is not None:
527
+ display_parts.append(f"Tout:{info['tout']}°C")
528
+ if info['kW'] is not None:
529
+ display_parts.append(f"**{info['kW']:.0f} kW**")
530
+
531
+ if info['type']:
532
+ type_color = "🔴" if info['type'] == "HOT" else "🔵"
533
+ display_parts.append(f"{type_color} {info['type']}")
534
+
535
+ if display_parts:
536
+ stream_cols[2].caption(' | '.join(display_parts))
537
+ else:
538
+ stream_cols[2].caption("(incomplete data)")
539
+
540
+ with map_col:
541
+ st.markdown("**Energy Map Overview (circle size = kW):**")
542
+
543
+ # Generate and display mini-map with kW circles for each stream
544
+ map_snapshot = st.session_state.get('map_snapshot')
545
+ map_snapshots = st.session_state.get('map_snapshots', {})
546
+ current_base = st.session_state.get('current_base', 'OpenStreetMap')
547
+
548
+ # Use the appropriate snapshot based on current base layer
549
+ if current_base in map_snapshots:
550
+ snapshot_to_use = map_snapshots[current_base]
551
+ else:
552
+ snapshot_to_use = map_snapshot
553
+
554
+ map_center = st.session_state.get('map_center', [51.708, 8.772])
555
+ map_zoom = st.session_state.get('map_zoom', 17.5)
556
+
557
+ if snapshot_to_use:
558
+ minimap_img = generate_stream_kw_minimap(
559
+ processes=processes,
560
+ map_snapshot=snapshot_to_use,
561
+ map_center=map_center,
562
+ map_zoom=map_zoom,
563
+ max_width=600,
564
+ max_height=450
565
+ )
566
+
567
+ if minimap_img:
568
+ st.image(minimap_img)
569
+ else:
570
+ st.caption("📍 Could not generate map preview")
571
+ else:
572
+ st.info("📍 Lock the map in Data Collection page first to see the energy overview.")
573
+
574
+ # Count selected streams
575
+ selected_count = sum(1 for k, v in st.session_state['selected_items'].items()
576
+ if v and k.startswith("stream_"))
577
+
578
+ # =====================================================
579
+ # PINCH ANALYSIS SECTION
580
+ # =====================================================
581
+ st.markdown("---")
582
+
583
+ if not PINCH_AVAILABLE:
584
+ st.error(f"Pinch analysis module not available: {PINCH_IMPORT_ERROR or 'Unknown error'}")
585
+ st.info("Please ensure the pinch_tool module is properly installed.")
586
+ else:
587
+ # Helper function to extract stream data from selection
588
def extract_stream_data(procs, sel_items):
    """
    Extract pinch-ready stream data from the user's selections.

    Args:
        procs: List of process dicts; each may contain a 'streams' list.
        sel_items: Mapping of selection keys ("stream_<proc>_<stream>")
            to booleans indicating whether the stream is selected.

    Returns:
        List of dicts with keys 'name', 'CP' (calculated as mdot * cp),
        'Tin' and 'Tout' for every selected stream whose data is
        complete; streams with missing or unparseable values are
        silently skipped.
    """
    def _to_float(value):
        # Best-effort numeric conversion; None signals "not usable".
        try:
            return float(value)
        except (ValueError, TypeError):
            return None

    # Property labels as stored on the stream dict, in (Tin, Tout, ṁ, cp) order.
    wanted = ('Tin', 'Tout', 'ṁ', 'cp')
    result_streams = []

    for sel_key, is_sel in sel_items.items():
        if not is_sel or not sel_key.startswith("stream_"):
            continue

        parts_split = sel_key.split("_")
        p_idx = int(parts_split[1])
        s_idx = int(parts_split[2])

        if p_idx >= len(procs):
            continue
        proc = procs[p_idx]
        proc_streams = proc.get('streams', [])
        if s_idx >= len(proc_streams):
            continue
        strm = proc_streams[s_idx]

        # Primary source: the properties/values dict structure
        # ('propN' labels map to 'valN' values).
        found = dict.fromkeys(wanted)
        props = strm.get('properties', {})
        vals = strm.get('values', {})
        if isinstance(props, dict) and isinstance(vals, dict):
            for pk, pname in props.items():
                if pname not in found:
                    continue
                v = vals.get(pk.replace('prop', 'val'), '')
                if v:
                    parsed = _to_float(v)
                    if parsed is not None:
                        found[pname] = parsed

        # Fallback: legacy flat fields on the stream dict.
        for pname, legacy_key in (('Tin', 'temp_in'), ('Tout', 'temp_out'),
                                  ('ṁ', 'mdot'), ('cp', 'cp')):
            if found[pname] is None and strm.get(legacy_key):
                found[pname] = _to_float(strm[legacy_key])

        tin, tout, mdot, cp_val = (found[k] for k in wanted)

        # Only streams with complete data can enter the pinch analysis.
        if None not in (tin, tout, mdot, cp_val):
            strm_name = strm.get('name', f'Stream {s_idx + 1}')
            proc_nm = proc.get('name', f'Subprocess {p_idx + 1}')
            result_streams.append({
                'name': f"{proc_nm} - {strm_name}",
                'CP': mdot * cp_val,  # heat capacity flowrate (kW/K)
                'Tin': tin,
                'Tout': tout
            })

    return result_streams
682
+
683
+ # Helper function to run pinch analysis
684
def run_pinch_analysis(strm_data, delta_tmin):
    """
    Run a full pinch analysis on the given stream data.

    Args:
        strm_data: List of dicts with 'CP', 'Tin' and 'Tout' per stream.
        delta_tmin: Minimum approach temperature (ΔTmin).

    Returns:
        The populated Pinch object with all results computed.
    """
    # The Pinch class reads its input from a CSV file, so serialise the
    # streams to a temporary file first (delete=False so it survives the
    # 'with' block; we remove it ourselves below).
    with tempfile.NamedTemporaryFile(mode='w', suffix='.csv',
                                     delete=False, newline='') as f:
        csv_writer = csv.writer(f)
        csv_writer.writerow(['Tmin', str(delta_tmin)])
        csv_writer.writerow(['CP', 'TSUPPLY', 'TTARGET'])
        csv_writer.writerows([s['CP'], s['Tin'], s['Tout']] for s in strm_data)
        temp_csv_path = f.name

    try:
        # Build all result structures without drawing (plots are rendered
        # separately by the caller).
        pinch_obj = Pinch(temp_csv_path, options={})
        for step in (pinch_obj.shiftTemperatures,
                     pinch_obj.constructTemperatureInterval,
                     pinch_obj.constructProblemTable,
                     pinch_obj.constructHeatCascade):
            step()
        pinch_obj.constructShiftedCompositeDiagram('EN')
        pinch_obj.constructCompositeDiagram('EN')
        pinch_obj.constructGrandCompositeCurve('EN')
        return pinch_obj
    finally:
        # Always remove the temporary CSV, even if the analysis fails.
        os.unlink(temp_csv_path)
715
+
716
+ # Extract stream data from selections
717
+ streams_data = extract_stream_data(processes, st.session_state['selected_items'])
718
+
719
+ if len(streams_data) < 2:
720
+ st.info("Select at least 2 streams with complete data (Tin, Tout, ṁ, cp) to run pinch analysis.")
721
+
722
+ # Show what data is missing for selected streams
723
+ if selected_count > 0:
724
+ st.markdown("**Data status for selected items:**")
725
+ for sel_key, is_sel in st.session_state['selected_items'].items():
726
+ if not is_sel:
727
+ continue
728
+ if sel_key.startswith("stream_"):
729
+ parts_split = sel_key.split("_")
730
+ p_idx = int(parts_split[1])
731
+ s_idx = int(parts_split[2])
732
+
733
+ if p_idx < len(processes):
734
+ proc = processes[p_idx]
735
+ proc_streams = proc.get('streams', [])
736
+
737
+ if s_idx < len(proc_streams):
738
+ strm = proc_streams[s_idx]
739
+ strm_name = strm.get('name', f'Stream {s_idx + 1}')
740
+ proc_nm = proc.get('name', f'Subprocess {p_idx + 1}')
741
+
742
+ # Check what data is available
743
+ props = strm.get('properties', {})
744
+ vals = strm.get('values', {})
745
+
746
+ has_tin = False
747
+ has_tout = False
748
+ has_mdot = False
749
+ has_cp = False
750
+
751
+ if isinstance(props, dict) and isinstance(vals, dict):
752
+ for pk, pname in props.items():
753
+ vk = pk.replace('prop', 'val')
754
+ v = vals.get(vk, '')
755
+ if pname == 'Tin' and v:
756
+ has_tin = True
757
+ elif pname == 'Tout' and v:
758
+ has_tout = True
759
+ elif pname == 'ṁ' and v:
760
+ has_mdot = True
761
+ elif pname == 'cp' and v:
762
+ has_cp = True
763
+
764
+ # Fallback to legacy
765
+ if not has_tin and strm.get('temp_in'):
766
+ has_tin = True
767
+ if not has_tout and strm.get('temp_out'):
768
+ has_tout = True
769
+ if not has_mdot and strm.get('mdot'):
770
+ has_mdot = True
771
+ if not has_cp and strm.get('cp'):
772
+ has_cp = True
773
+
774
+ missing = []
775
+ if not has_tin:
776
+ missing.append("Tin")
777
+ if not has_tout:
778
+ missing.append("Tout")
779
+ if not has_mdot:
780
+ missing.append("ṁ")
781
+ if not has_cp:
782
+ missing.append("cp")
783
+
784
+ if missing:
785
+ st.warning(f"⚠️ {proc_nm} - {strm_name}: Missing {', '.join(missing)}")
786
+ else:
787
+ st.success(f"✅ {proc_nm} - {strm_name}: Complete data")
788
+ else:
789
+ # Auto-run pinch analysis
790
+ try:
791
+ # Row: Shifted toggle | ΔTmin (small) | spacer | Hot Utility | Cold Utility | Pinch Temp
792
+ toggle_col, tmin_col, spacer, metric1, metric2, metric3 = st.columns([0.6, 0.5, 0.4, 0.7, 0.7, 0.7])
793
+
794
+ with toggle_col:
795
+ show_shifted = st.toggle("Show Shifted Composite Curves", value=False, key="shifted_toggle")
796
+
797
+ with tmin_col:
798
+ tmin = st.number_input(
799
+ "ΔTmin",
800
+ min_value=1.0,
801
+ max_value=50.0,
802
+ value=10.0,
803
+ step=1.0,
804
+ key="tmin_input",
805
+ format="%.0f"
806
+ )
807
+
808
+ pinch = run_pinch_analysis(streams_data, tmin)
809
+ results = {
810
+ 'hot_utility': pinch.hotUtility,
811
+ 'cold_utility': pinch.coldUtility,
812
+ 'pinch_temperature': pinch.pinchTemperature,
813
+ 'tmin': pinch.tmin,
814
+ 'composite_diagram': pinch.compositeDiagram,
815
+ 'shifted_composite_diagram': pinch.shiftedCompositeDiagram,
816
+ 'grand_composite_curve': pinch.grandCompositeCurve,
817
+ 'heat_cascade': pinch.heatCascade,
818
+ 'unfeasible_heat_cascade': pinch.unfeasibleHeatCascade,
819
+ 'problem_table': pinch.problemTable,
820
+ 'temperatures': pinch._temperatures,
821
+ 'streams': list(pinch.streams)
822
+ }
823
+
824
+ metric1.metric("Hot Utility", f"{results['hot_utility']:.2f} kW")
825
+ metric2.metric("Cold Utility", f"{results['cold_utility']:.2f} kW")
826
+ metric3.metric("Pinch Temp", f"{results['pinch_temperature']:.1f} °C")
827
+
828
+ # Side by side plots: Composite Curves (left) and Grand Composite Curve (right)
829
+ plot_col1, plot_col2 = st.columns(2)
830
+
831
+ # Build hover text for streams
832
+ hot_streams = [s for s in streams_data if s['Tin'] > s['Tout']]
833
+ cold_streams = [s for s in streams_data if s['Tin'] < s['Tout']]
834
+
835
+ with plot_col1:
836
+ fig1 = go.Figure()
837
+
838
+ # Select which diagram to show
839
+ if show_shifted:
840
+ diagram = results['shifted_composite_diagram']
841
+ curve_label = "Shifted"
842
+ title_text = "Shifted Composite Curves"
843
+ # For shifted, temperatures are shifted by ±Tmin/2
844
+ tmin_half = results['tmin'] / 2
845
+ else:
846
+ diagram = results['composite_diagram']
847
+ curve_label = ""
848
+ title_text = "Composite Curves"
849
+ tmin_half = 0
850
+
851
+ # Hot composite curve with hover info
852
+ hot_T = diagram['hot']['T']
853
+ hot_H = diagram['hot']['H']
854
+
855
+ # Create hover text for hot curve points
856
+ hot_hover = []
857
+ for i, (h, t) in enumerate(zip(hot_H, hot_T)):
858
+ # Find streams at this temperature (adjust for shifted temps)
859
+ if show_shifted:
860
+ actual_t = t + tmin_half # Convert back to actual temp
861
+ else:
862
+ actual_t = t
863
+ matching = [s['name'] for s in hot_streams if min(s['Tin'], s['Tout']) <= actual_t <= max(s['Tin'], s['Tout'])]
864
+ stream_info = '<br>'.join(matching) if matching else 'Composite'
865
+ label = f"<b>Hot {curve_label}</b>" if curve_label else "<b>Hot Composite</b>"
866
+ hot_hover.append(f"{label}<br>T: {t:.1f}°C<br>H: {h:.1f} kW<br>Streams: {stream_info}")
867
+
868
+ fig1.add_trace(go.Scatter(
869
+ x=hot_H, y=hot_T,
870
+ mode='lines+markers',
871
+ name='Hot',
872
+ line=dict(color='red', width=2),
873
+ marker=dict(size=6),
874
+ hovertemplate='%{text}<extra></extra>',
875
+ text=hot_hover
876
+ ))
877
+
878
+ # Cold composite curve with hover info
879
+ cold_T = diagram['cold']['T']
880
+ cold_H = diagram['cold']['H']
881
+
882
+ # Create hover text for cold curve points
883
+ cold_hover = []
884
+ for i, (h, t) in enumerate(zip(cold_H, cold_T)):
885
+ if show_shifted:
886
+ actual_t = t - tmin_half # Convert back to actual temp
887
+ else:
888
+ actual_t = t
889
+ matching = [s['name'] for s in cold_streams if min(s['Tin'], s['Tout']) <= actual_t <= max(s['Tin'], s['Tout'])]
890
+ stream_info = '<br>'.join(matching) if matching else 'Composite'
891
+ label = f"<b>Cold {curve_label}</b>" if curve_label else "<b>Cold Composite</b>"
892
+ cold_hover.append(f"{label}<br>T: {t:.1f}°C<br>H: {h:.1f} kW<br>Streams: {stream_info}")
893
+
894
+ fig1.add_trace(go.Scatter(
895
+ x=cold_H, y=cold_T,
896
+ mode='lines+markers',
897
+ name='Cold',
898
+ line=dict(color='blue', width=2),
899
+ marker=dict(size=6),
900
+ hovertemplate='%{text}<extra></extra>',
901
+ text=cold_hover
902
+ ))
903
+
904
+ # Pinch temperature line
905
+ fig1.add_hline(
906
+ y=results['pinch_temperature'],
907
+ line_dash='dash',
908
+ line_color='gray',
909
+ annotation_text=f"Pinch: {results['pinch_temperature']:.1f}°C",
910
+ annotation_position='top right'
911
+ )
912
+
913
+ fig1.update_layout(
914
+ title=dict(text=title_text, font=dict(size=14)),
915
+ xaxis_title='Enthalpy H (kW)',
916
+ yaxis_title='Temperature T (°C)',
917
+ height=400,
918
+ margin=dict(l=60, r=20, t=40, b=50),
919
+ legend=dict(x=0.7, y=0.1),
920
+ hovermode='closest',
921
+ xaxis=dict(rangemode='tozero'),
922
+ yaxis=dict(rangemode='tozero')
923
+ )
924
+
925
+ st.plotly_chart(fig1, width='stretch', key="composite_chart")
926
+
927
+ with plot_col2:
928
+ fig2 = go.Figure()
929
+
930
+ gcc_H = results['grand_composite_curve']['H']
931
+ gcc_T = results['grand_composite_curve']['T']
932
+ heat_cascade = results['heat_cascade']
933
+ temperatures = results['temperatures']
934
+
935
+ # Create hover text for GCC points
936
+ gcc_hover = []
937
+ for i, (h, t) in enumerate(zip(gcc_H, gcc_T)):
938
+ if i < len(heat_cascade):
939
+ dh = heat_cascade[i]['deltaH']
940
+ region = 'Heat deficit (needs heating)' if dh > 0 else ('Heat surplus (needs cooling)' if dh < 0 else 'Balanced')
941
+ else:
942
+ region = ''
943
+ gcc_hover.append(f"<b>GCC</b><br>Shifted T: {t:.1f}°C<br>Net ΔH: {h:.1f} kW<br>{region}")
944
+
945
+ # Plot GCC with color segments
946
+ for i in range(len(gcc_H) - 1):
947
+ if i < len(heat_cascade):
948
+ if heat_cascade[i]['deltaH'] > 0:
949
+ color = 'red'
950
+ elif heat_cascade[i]['deltaH'] < 0:
951
+ color = 'blue'
952
+ else:
953
+ color = 'gray'
954
+ else:
955
+ color = 'gray'
956
+
957
+ fig2.add_trace(go.Scatter(
958
+ x=[gcc_H[i], gcc_H[i+1]],
959
+ y=[gcc_T[i], gcc_T[i+1]],
960
+ mode='lines+markers',
961
+ line=dict(color=color, width=2),
962
+ marker=dict(size=6, color=color),
963
+ hovertemplate='%{text}<extra></extra>',
964
+ text=[gcc_hover[i], gcc_hover[i+1] if i+1 < len(gcc_hover) else ''],
965
+ showlegend=False
966
+ ))
967
+
968
+ # Pinch temperature line
969
+ fig2.add_hline(
970
+ y=results['pinch_temperature'],
971
+ line_dash='dash',
972
+ line_color='gray',
973
+ annotation_text=f"Pinch: {results['pinch_temperature']:.1f}°C",
974
+ annotation_position='top right'
975
+ )
976
+
977
+ # Zero enthalpy line
978
+ fig2.add_vline(x=0, line_color='black', line_width=1, opacity=0.3)
979
+
980
+ fig2.update_layout(
981
+ title=dict(text='Grand Composite Curve', font=dict(size=14)),
982
+ xaxis_title='Net ΔH (kW)',
983
+ yaxis_title='Shifted Temperature (°C)',
984
+ height=400,
985
+ margin=dict(l=60, r=20, t=40, b=50),
986
+ hovermode='closest',
987
+ yaxis=dict(rangemode='tozero')
988
+ )
989
+
990
+ st.plotly_chart(fig2, width='stretch', key="gcc_chart")
991
+
992
+ # More information expander
993
+ with st.expander("More information"):
994
+ import pandas as pd
995
+
996
+
997
+ temps = results['temperatures']
998
+ pinch_streams = results['streams']
999
+
1000
+ if pinch_streams and temps:
1001
+ fig_interval = go.Figure()
1002
+
1003
+ num_streams = len(pinch_streams)
1004
+ x_positions = [(i + 1) * 1.0 for i in range(num_streams)]
1005
+
1006
+ # Draw horizontal temperature lines
1007
+ for temperature in temps:
1008
+ fig_interval.add_shape(
1009
+ type="line",
1010
+ x0=0, x1=num_streams + 1,
1011
+ y0=temperature, y1=temperature,
1012
+ line=dict(color="gray", width=1, dash="dot"),
1013
+ )
1014
+
1015
+ # Draw pinch temperature line
1016
+ fig_interval.add_shape(
1017
+ type="line",
1018
+ x0=0, x1=num_streams + 1,
1019
+ y0=results['pinch_temperature'], y1=results['pinch_temperature'],
1020
+ line=dict(color="black", width=2, dash="dash"),
1021
+ )
1022
+ fig_interval.add_annotation(
1023
+ x=num_streams + 0.5, y=results['pinch_temperature'],
1024
+ text=f"Pinch: {results['pinch_temperature']:.1f}°C",
1025
+ showarrow=False, font=dict(size=10),
1026
+ xanchor='left'
1027
+ )
1028
+
1029
+ # Draw stream arrows
1030
+ for i, stream in enumerate(pinch_streams):
1031
+ ss = stream['ss'] # Shifted supply temp
1032
+ st_temp = stream['st'] # Shifted target temp
1033
+ stream_type = stream['type']
1034
+ x_pos = x_positions[i]
1035
+
1036
+ # Color based on stream type
1037
+ color = 'red' if stream_type == 'HOT' else 'blue'
1038
+ stream_name = streams_data[i]['name'] if i < len(streams_data) else f'Stream {i+1}'
1039
+
1040
+ # Draw arrow as a line with annotation for arrowhead
1041
+ fig_interval.add_trace(go.Scatter(
1042
+ x=[x_pos, x_pos],
1043
+ y=[ss, st_temp],
1044
+ mode='lines',
1045
+ line=dict(color=color, width=8),
1046
+ hovertemplate=f"<b>{stream_name}</b><br>" +
1047
+ f"Type: {stream_type}<br>" +
1048
+ f"T_supply (shifted): {ss:.1f}°C<br>" +
1049
+ f"T_target (shifted): {st_temp:.1f}°C<br>" +
1050
+ f"CP: {stream['cp']:.2f} kW/K<extra></extra>",
1051
+ showlegend=False
1052
+ ))
1053
+
1054
+ # Add arrowhead
1055
+ fig_interval.add_annotation(
1056
+ x=x_pos, y=st_temp,
1057
+ ax=x_pos, ay=ss,
1058
+ xref='x', yref='y',
1059
+ axref='x', ayref='y',
1060
+ showarrow=True,
1061
+ arrowhead=2,
1062
+ arrowsize=1.5,
1063
+ arrowwidth=3,
1064
+ arrowcolor=color
1065
+ )
1066
+
1067
+ # Stream label at top
1068
+ label_y = max(ss, st_temp) + (max(temps) - min(temps)) * 0.03
1069
+ fig_interval.add_annotation(
1070
+ x=x_pos, y=label_y,
1071
+ text=f"<b>S{i+1}</b>",
1072
+ showarrow=False,
1073
+ font=dict(size=11, color='white'),
1074
+ bgcolor=color,
1075
+ bordercolor='black',
1076
+ borderwidth=1,
1077
+ borderpad=3
1078
+ )
1079
+
1080
+ # CP value in middle
1081
+ mid_y = (ss + st_temp) / 2
1082
+ fig_interval.add_annotation(
1083
+ x=x_pos, y=mid_y,
1084
+ text=f"CP={stream['cp']:.1f}",
1085
+ showarrow=False,
1086
+ font=dict(size=9, color='white'),
1087
+ textangle=-90
1088
+ )
1089
+
1090
+ fig_interval.update_layout(
1091
+ title=dict(text='Shifted Temperature Interval Diagram', font=dict(size=14)),
1092
+ xaxis=dict(
1093
+ title='Streams',
1094
+ showticklabels=False,
1095
+ range=[0, num_streams + 1],
1096
+ showgrid=False
1097
+ ),
1098
+ yaxis=dict(
1099
+ title='Shifted Temperature S (°C)',
1100
+ showgrid=True,
1101
+ gridcolor='rgba(0,0,0,0.1)'
1102
+ ),
1103
+ height=400,
1104
+ margin=dict(l=60, r=20, t=40, b=40),
1105
+ hovermode='closest',
1106
+ showlegend=False
1107
+ )
1108
+
1109
+ st.plotly_chart(fig_interval, width='stretch', key="interval_chart")
1110
+
1111
+ st.markdown("---")
1112
+
1113
+ # Problem Table
1114
+ st.markdown("##### Problem Table")
1115
+ if results['problem_table']:
1116
+ problem_df = pd.DataFrame(results['problem_table'])
1117
+ # Rename columns for clarity
1118
+ col_rename = {
1119
+ 'T': 'T (°C)',
1120
+ 'deltaT': 'ΔT (°C)',
1121
+ 'cpHot': 'ΣCP Hot (kW/K)',
1122
+ 'cpCold': 'ΣCP Cold (kW/K)',
1123
+ 'deltaCp': 'ΔCP (kW/K)',
1124
+ 'deltaH': 'ΔH (kW)'
1125
+ }
1126
+ problem_df = problem_df.rename(columns={k: v for k, v in col_rename.items() if k in problem_df.columns})
1127
+ st.dataframe(problem_df, width='stretch', hide_index=True)
1128
+ else:
1129
+ st.info("No problem table data available")
1130
+
1131
+ # Heat Cascades side by side
1132
+ cascade_col1, cascade_col2 = st.columns(2)
1133
+
1134
+ with cascade_col1:
1135
+ st.markdown("##### Unfeasible Heat Cascade")
1136
+ if results['unfeasible_heat_cascade']:
1137
+ # Add temperature column to dataframe
1138
+ unfeasible_data = []
1139
+ for i, item in enumerate(results['unfeasible_heat_cascade']):
1140
+ row = {'T (°C)': temps[i+1] if i+1 < len(temps) else '',
1141
+ 'ΔH (kW)': item['deltaH'],
1142
+ 'Cascade (kW)': item['exitH']}
1143
+ unfeasible_data.append(row)
1144
+ unfeasible_df = pd.DataFrame(unfeasible_data)
1145
+ st.dataframe(unfeasible_df, width='stretch', hide_index=True)
1146
+ else:
1147
+ st.info("No unfeasible cascade data")
1148
+
1149
+ with cascade_col2:
1150
+ st.markdown("##### Feasible Heat Cascade")
1151
+ if results['heat_cascade']:
1152
+ # Add temperature column to dataframe
1153
+ feasible_data = []
1154
+ for i, item in enumerate(results['heat_cascade']):
1155
+ row = {'T (°C)': temps[i+1] if i+1 < len(temps) else '',
1156
+ 'ΔH (kW)': item['deltaH'],
1157
+ 'Cascade (kW)': item['exitH']}
1158
+ feasible_data.append(row)
1159
+ feasible_df = pd.DataFrame(feasible_data)
1160
+ st.dataframe(feasible_df, width='stretch', hide_index=True)
1161
+ else:
1162
+ st.info("No feasible cascade data")
1163
+
1164
+ except Exception as e:
1165
+ st.error(f"Error: {str(e)}")
src/process_models.json ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "Drying": {
3
+ "Spray drying": ["milk powder", "coffee powder", "egg powder"],
4
+ "Granular drying": ["grain", "sand", "plastic granules"],
5
+ "Tunnel drying": ["fruits", "vegetables", "ceramics"],
6
+ "Contact drying": ["paper", "food", "wood"],
7
+ "Freeze drying": ["coffee extract", "fruit extract"]
8
+ },
9
+ "Autoclaving": {
10
+ "Sterilization": ["canned food", "medical tools"],
11
+ "Hardening": ["vulcanization of rubber"]
12
+ },
13
+ "Thermal activation solid products": {
14
+ "Boiling small scale": ["canned food", "vegetables"],
15
+ "Boiling large scale": ["specialty chemicals", "food"]
16
+ },
17
+ "Thermal activation fluid products": {
18
+ "Continuous bath heating": ["boiling vegetables", "pasteurization"],
19
+ "Continuous heating of fluid": ["alcohol chemical conversion", "milk pasteurization"]
20
+ },
21
+ "Thermal separation": {
22
+ "Distillation": ["alcohol chemical conversion", "specialty chemicals"],
23
+ "Evaporation": ["fruit juice", "milk"],
24
+ "Shipping": ["sugar syrup"]
25
+ }
26
+ }
src/process_utils.py ADDED
@@ -0,0 +1,436 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pandas as pd
2
+ import io
3
+ import copy
4
+ from typing import List, Dict, Tuple, Optional, Any, Generator
5
+
6
def init_process_state(session_state):
    """Ensure the process-related keys exist in the session state.

    Args:
        session_state: Mutable mapping (e.g. st.session_state) to initialise.
    """
    defaults = {
        'processes': [],              # list of process dicts
        'selected_process_idx': None  # nothing selected yet
    }
    for key, value in defaults.items():
        if key not in session_state:
            session_state[key] = value
11
+
12
# Columns a process CSV must contain to be importable (see parse_process_csv_file).
REQUIRED_PROC_COLS = {"name","next","conntemp","product_tout","connm","conncp","stream_no","mdot","temp_in","temp_out","cp"}

# Level names for different hierarchy depths; deeper levels fall back to a
# generic label via get_level_name().
LEVEL_NAMES = {
    0: 'Process',
    1: 'Subprocess',
    2: 'Sub-subprocess',
    3: 'Sub-sub-subprocess',
    4: 'Sub-sub-sub-subprocess'
}
22
+
23
def get_level_name(level: int) -> str:
    """Return the display name for a hierarchy level.

    Levels present in LEVEL_NAMES get their dedicated name; any deeper
    level falls back to a generic "Level-N Process" label.
    """
    try:
        return LEVEL_NAMES[level]
    except KeyError:
        return f'Level-{level} Process'
26
+
27
+
28
def create_stream(name: str = '', stream_type: str = 'product') -> Dict[str, Any]:
    """
    Create a new stream dict with the standard structure.

    Reusable at every hierarchy level.

    Args:
        name: Name of the stream.
        stream_type: Type of stream (product, steam, air, water).

    Returns:
        A dict representing the stream with all standard fields.
    """
    stream: Dict[str, Any] = {'name': name, 'type': stream_type}
    # Four editable property slots: label in 'properties', value in 'values'.
    stream['properties'] = {
        f'prop{i}': label
        for i, label in enumerate(('Tin', 'Tout', 'ṁ', 'cp'), start=1)
    }
    stream['values'] = {f'val{i}': '' for i in range(1, 5)}
    # Legacy flat fields kept for backward compatibility with old data.
    stream.update(mdot='', temp_in='', temp_out='', cp='')
    return stream
61
+
62
+
63
def create_process_node(name: str = '', level: int = 0) -> Dict[str, Any]:
    """
    Create a process/subprocess/sub-subprocess node with a consistent structure.

    The same structure is reused at EVERY hierarchy level: processes
    (level 0), subprocesses (level 1), sub-subprocesses (level 2), etc.

    Args:
        name: Name of the process node (a level-based default is used if empty).
        level: Hierarchy level (0=process, 1=subprocess, ...).

    Returns:
        A dict representing the process node with all standard fields.
    """
    node: Dict[str, Any] = {
        'name': name or f'{get_level_name(level)} 1',
        'level': level,
        'next': '',
        # Product connection fields (Tin / Tout / ṁ / cp of the product).
        'conntemp': '',
        'product_tout': '',
        'connm': '',
        'conncp': '',
        'streams': [],
        'children': [],  # sub-nodes at the next level down - RECURSIVE!
        'lat': None,
        'lon': None,
        # Top-level processes are drawn larger than nested ones.
        'box_scale': 1.0 if level > 0 else 1.5,
        'expanded': False,       # UI state: node expanded
        'info_expanded': False,  # UI state: info section expanded
        'model': {'level1': None, 'level2': None},  # process model selection
        'params_requested': False,
        'hours': ''  # operating hours
    }
    # Optional auxiliary measurements (air side, moisture, etc.).
    node['extra_info'] = dict.fromkeys(
        ('air_tin', 'air_tout', 'air_mdot', 'air_cp',
         'water_content_in', 'water_content_out',
         'density', 'pressure', 'notes'), '')
    # Process parameters used by the model.
    node['params'] = {'tin': '', 'tout': '', 'time': '', 'cp': '',
                      'mass_flow': None, 'thermal_power': None}
    return node
115
+
116
+
117
def add_child_to_node(parent_node: Dict[str, Any], child_name: str = '') -> Dict[str, Any]:
    """
    Append a new child node one level below *parent_node* and return it.

    Works recursively at ANY depth: the same function adds a subprocess
    to a process, a sub-subprocess to a subprocess, and so on.

    Args:
        parent_node: The parent process node to add a child to.
        child_name: Optional name for the child (a level/position-based
            default is used when empty).

    Returns:
        The newly created child node.
    """
    children = parent_node.setdefault('children', [])
    child_level = parent_node.get('level', 0) + 1
    # Default name encodes level and position, e.g. "Subprocess 3".
    default_name = f"{get_level_name(child_level)} {len(children) + 1}"
    child_node = create_process_node(name=child_name or default_name,
                                     level=child_level)
    children.append(child_node)
    return child_node
149
+
150
+
151
def delete_child_from_node(parent_node: Dict[str, Any], child_index: int) -> bool:
    """
    Delete a child node from a parent node by index (any hierarchy level).

    Args:
        parent_node: The parent process node.
        child_index: Index of the child to delete.

    Returns:
        True if a child was removed, False otherwise.
    """
    children = parent_node.get('children')
    if not children or not (0 <= child_index < len(children)):
        return False
    del children[child_index]
    return True
171
+
172
+
173
def add_stream_to_node(node: Dict[str, Any], stream_name: str = '') -> Dict[str, Any]:
    """
    Append a new product stream to ANY process node and return it.

    Args:
        node: The process node (process, subprocess, ...) to add a stream to.
        stream_name: Optional stream name (defaults to "Stream N").

    Returns:
        The newly created stream dict.
    """
    streams = node.setdefault('streams', [])
    default_name = f'Stream {len(streams) + 1}'
    new_stream = create_stream(name=stream_name or default_name,
                               stream_type='product')
    streams.append(new_stream)
    return new_stream
195
+
196
+
197
def delete_stream_from_node(node: Dict[str, Any], stream_index: int) -> bool:
    """
    Delete a stream from ANY process node by index.

    Args:
        node: The process node.
        stream_index: Index of the stream to delete.

    Returns:
        True if a stream was removed, False otherwise.
    """
    streams = node.get('streams')
    if not streams or not (0 <= stream_index < len(streams)):
        return False
    del streams[stream_index]
    return True
217
+
218
+
219
def iterate_all_nodes(nodes: List[Dict[str, Any]]) -> Generator[Tuple[Dict[str, Any], int, List[int]], None, None]:
    """
    Yield all nodes of a process tree in depth-first (pre-order) order.

    Args:
        nodes: List of root nodes.

    Yields:
        (node, level, path_indices) tuples, where path_indices is the
        list of indices from the root to the node, e.g. [0, 2, 1].
    """
    # Explicit stack instead of recursion; children are pushed in reverse
    # so they are popped (and yielded) in their original order.
    stack = [(node, 0, [i]) for i, node in reversed(list(enumerate(nodes)))]
    while stack:
        node, level, path = stack.pop()
        yield node, level, path
        children = node.get('children', [])
        for i in range(len(children) - 1, -1, -1):
            stack.append((children[i], level + 1, path + [i]))
243
+
244
+
245
def get_node_by_path(root_nodes: List[Dict[str, Any]], path: List[int]) -> Optional[Dict[str, Any]]:
    """
    Resolve a node from its index path.

    A path like [0, 1, 2] means process 0, its child 1, that child's
    grandchild 2.

    Args:
        root_nodes: List of root process nodes.
        path: List of indices forming the path to the node.

    Returns:
        The node if found, None for an empty or out-of-range path.
    """
    if not path:
        return None

    pool = root_nodes
    node: Optional[Dict[str, Any]] = None
    for idx in path:
        if idx >= len(pool):
            return None
        node = pool[idx]
        pool = node.get('children', [])
    return node
268
+
269
+
270
def copy_streams_to_all_descendants(parent_node: Dict[str, Any]):
    """
    Copy *parent_node*'s streams to ALL its descendants.

    Implements the requirement that changing a parent's streams
    propagates to children, grandchildren, etc. Each descendant gets
    its own deep copy so later edits do not alias.

    Args:
        parent_node: The node whose streams are copied downwards.
    """
    template = parent_node.get('streams', [])
    # Iterative worklist traversal over all descendants.
    pending = list(parent_node.get('children', []))
    while pending:
        node = pending.pop()
        node['streams'] = copy.deepcopy(template)
        pending.extend(node.get('children', []))
289
+
290
+
291
def sync_node_with_parent(child_node: Dict[str, Any], parent_node: Dict[str, Any],
                          sync_streams: bool = True, sync_info: bool = False):
    """
    Sync data from a parent node into a specific child node.

    Args:
        child_node: The child node to update.
        parent_node: The parent node to copy from.
        sync_streams: Whether to sync the 'streams' list.
        sync_info: Whether to sync the 'extra_info' fields.
    """
    # (enabled flag, key to copy, default when absent on the parent).
    # Deep copies ensure the child owns its data independently.
    for enabled, key, default in ((sync_streams, 'streams', []),
                                  (sync_info, 'extra_info', {})):
        if enabled:
            child_node[key] = copy.deepcopy(parent_node.get(key, default))
308
+
309
def count_all_descendants(node: Dict[str, Any]) -> int:
    """
    Count all descendants (children, grandchildren, ...) of a node.

    Args:
        node: The node to count descendants for.

    Returns:
        Total number of descendant nodes.
    """
    children = node.get('children', [])
    # Direct children plus, recursively, everything below them.
    return len(children) + sum(count_all_descendants(c) for c in children)
325
+
326
+
327
+ # =============================================================================
328
+ # LEGACY FUNCTIONS - Keep for backward compatibility with existing code
329
+ # =============================================================================
330
+
331
def parse_process_csv_file(uploaded_file) -> Tuple[Optional[list], str]:
    """
    Parse an uploaded process CSV into a list of process dicts.

    Rows sharing the same (name, next, conntemp, product_tout, connm,
    conncp) tuple are merged into a single process; each row with a
    stream_no contributes one stream entry to that process.

    Args:
        uploaded_file: File-like object whose read() returns UTF-8 bytes.

    Returns:
        (processes, status message) on success, (None, error message)
        on failure or missing columns.
    """
    if uploaded_file is None:
        return None, "No file provided"
    try:
        raw = uploaded_file.read()
        df = pd.read_csv(io.StringIO(raw.decode('utf-8')))
        if not REQUIRED_PROC_COLS.issubset(df.columns):
            return None, "CSV missing required columns"

        procs = []
        proc_lookup = {}
        for _, row in df.iterrows():
            # product_tout participates in the key so processes with
            # distinct product outlet temperatures stay separate.
            key = (row['name'], row['next'], row['conntemp'],
                   row.get('product_tout', ''), row['connm'], row['conncp'])
            proc = proc_lookup.get(key)
            if proc is None:
                proc = {
                    "name": row.get('name', ''),
                    "next": row.get('next', ''),
                    "conntemp": row.get('conntemp', ''),          # Product Tin
                    "product_tout": row.get('product_tout', ''),  # Product Tout
                    "connm": row.get('connm', ''),                # Product ṁ
                    "conncp": row.get('conncp', ''),              # Product cp
                    "streams": [],
                    "children": [],  # support for sub-levels
                    "lat": row.get('lat') if 'lat' in df.columns else None,
                    "lon": row.get('lon') if 'lon' in df.columns else None,
                }
                proc_lookup[key] = proc
                procs.append(proc)
            # Rows without a stream_no only define the process itself.
            if pd.notna(row.get('stream_no')):
                proc['streams'].append({
                    "mdot": row.get('mdot', ''),
                    "temp_in": row.get('temp_in', ''),
                    "temp_out": row.get('temp_out', ''),
                    "cp": row.get('cp', ''),
                })
        return procs, f"Loaded {len(procs)} processes"
    except (UnicodeDecodeError, OSError, ValueError) as e:
        return None, f"Failed: {e}"
371
+
372
def processes_to_csv_bytes(processes: List[Dict]) -> bytes:
    """Export processes to UTF-8 CSV bytes, flattening nested children.

    Child processes appear as extra rows whose 'name' is the full
    'parent > child' path; a process with no streams still emits one row.
    """
    rows = []

    def _row(proc: Dict, path: str, stream_no, stream: Dict) -> Dict:
        """Build one output row; key order fixes the CSV column order."""
        return {
            'name': path,
            'next': proc.get('next', ''),
            'conntemp': proc.get('conntemp', ''),
            'product_tout': proc.get('product_tout', ''),
            'connm': proc.get('connm', ''),
            'conncp': proc.get('conncp', ''),
            'stream_no': stream_no,
            'mdot': stream.get('mdot', ''),
            'temp_in': stream.get('temp_in', ''),
            'temp_out': stream.get('temp_out', ''),
            'cp': stream.get('cp', ''),
            'lat': proc.get('lat'),
            'lon': proc.get('lon'),
            'level': proc.get('level', 0),
        }

    def _walk(proc: Dict, prefix: str = ''):
        """Emit rows for proc, then recurse into its children."""
        name = proc.get('name', '')
        path = f"{prefix} > {name}" if prefix else name

        streams = proc.get('streams')
        if not streams:
            # Keep the process visible in the export even without streams.
            rows.append(_row(proc, path, '', {}))
        else:
            for num, stream in enumerate(streams, start=1):
                rows.append(_row(proc, path, num, stream))

        for child in proc.get('children', []):
            _walk(child, path)

    for proc in processes:
        _walk(proc)

    out = io.StringIO()
    pd.DataFrame(rows).to_csv(out, index=False)
    return out.getvalue().encode('utf-8')
413
+
414
def add_process(session_state):
    """Legacy helper: append a fresh subprocess to the flat list and select it."""
    proc = create_process_node(level=1)  # level 1 marks a subprocess
    proc['name'] = ''  # the UI fills the name in afterwards
    procs = session_state['processes']
    procs.append(proc)
    session_state['selected_process_idx'] = len(procs) - 1
420
+
421
def delete_process(session_state, idx):
    """Legacy helper: remove the subprocess at *idx* from the flat list."""
    procs = session_state['processes']
    if not 0 <= idx < len(procs):
        return  # out-of-range index: nothing to delete
    procs.pop(idx)
    # Clear the selection only when the deleted entry was the selected one.
    if session_state['selected_process_idx'] == idx:
        session_state['selected_process_idx'] = None
427
+
428
def add_stream_to_process(session_state, pidx):
    """Legacy helper: append a stream to the subprocess at *pidx*."""
    procs = session_state['processes']
    if 0 <= pidx < len(procs):
        add_stream_to_node(procs[pidx])
432
+
433
def delete_stream_from_process(session_state, pidx, sidx):
    """Legacy helper: remove stream *sidx* from the subprocess at *pidx*."""
    procs = session_state['processes']
    if 0 <= pidx < len(procs):
        delete_stream_from_node(procs[pidx], sidx)
src/requirements.txt ADDED
@@ -0,0 +1,110 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ altair==5.5.0
2
+ annotated-types==0.7.0
3
+ appnope==0.1.4
4
+ asttokens==3.0.1
5
+ attrs==25.3.0
6
+ blinker==1.9.0
7
+ branca==0.8.1
8
+ cachetools==6.2.0
9
+ certifi==2025.8.3
10
+ charset-normalizer==3.4.3
11
+ click==8.2.1
12
+ comm==0.2.3
13
+ contourpy==1.3.3
14
+ CoolProp==7.2.0
15
+ cycler==0.12.1
16
+ dacite==1.9.2
17
+ debugpy==1.8.17
18
+ decorator==5.2.1
19
+ executing==2.2.1
20
+ filetype==1.2.0
21
+ folium==0.20.0
22
+ fonttools==4.61.0
23
+ gitdb==4.0.12
24
+ GitPython==3.1.45
25
+ idna==3.10
26
+ ImageHash==4.3.2
27
+ importlib_metadata==8.7.0
28
+ ipykernel==7.1.0
29
+ ipython==9.7.0
30
+ ipython_pygments_lexers==1.1.1
31
+ jedi==0.19.2
32
+ Jinja2==3.1.6
33
+ joblib==1.5.2
34
+ jsonschema==4.25.1
35
+ jsonschema-specifications==2025.9.1
36
+ jupyter_client==8.6.3
37
+ jupyter_core==5.9.1
38
+ kiwisolver==1.4.9
39
+ llvmlite==0.45.1
40
+ MarkupSafe==3.0.2
41
+ matplotlib==3.10.0
42
+ matplotlib-inline==0.2.1
43
+ minify_html==0.18.1
44
+ multimethod==1.12
45
+ narwhals==2.4.0
46
+ nest_asyncio==1.6.0
47
+ networkx==3.6
48
+ numba==0.62.1
49
+ numpy==2.3.2
50
+ packaging==25.0
51
+ pandas==2.3.2
52
+ parso==0.8.5
53
+ patsy==1.0.2
54
+ pexpect==4.9.0
55
+ phik==0.12.5
56
+ pillow==11.3.0
57
+ pip==25.2
58
+ platformdirs==4.5.0
59
+ plotly==6.5.0
60
+ prompt_toolkit==3.0.52
61
+ protobuf==6.32.0
62
+ psutil==7.1.3
63
+ ptyprocess==0.7.0
64
+ pure_eval==0.2.3
65
+ puremagic==1.30
66
+ pyarrow==21.0.0
67
+ pydantic==2.12.5
68
+ pydantic_core==2.41.5
69
+ pydeck==0.9.1
70
+ Pygments==2.19.2
71
+ pyparsing==3.2.5
72
+ python-dateutil==2.9.0.post0
73
+ pytz==2025.2
74
+ PyWavelets==1.9.0
75
+ PyYAML==6.0.3
76
+ pyzmq==27.1.0
77
+ referencing==0.36.2
78
+ reportlab==4.4.5
79
+ requests==2.32.5
80
+ rpds-py==0.27.1
81
+ scipy==1.16.3
82
+ seaborn==0.13.2
83
+ setuptools==78.1.1
84
+ six==1.17.0
85
+ smmap==5.0.2
86
+ stack_data==0.6.3
87
+ staticmap==0.5.7
88
+ statsmodels==0.14.5
89
+ streamlit==1.52.1
90
+ streamlit-folium==0.25.1
91
+ streamlit-image-coordinates==0.4.0
92
+ tabulate==0.9.0
93
+ tenacity==9.1.2
94
+ toml==0.10.2
95
+ tornado==6.5.2
96
+ tqdm==4.67.1
97
+ traitlets==5.14.3
98
+ typeguard==4.4.4
99
+ typing_extensions==4.15.0
100
+ typing-inspection==0.4.2
101
+ tzdata==2025.2
102
+ urllib3==2.5.0
103
+ visions==0.8.1
104
+ watchdog==6.0.0
105
+ wcwidth==0.2.14
106
+ wheel==0.45.1
107
+ wordcloud==1.9.4
108
+ xyzservices==2025.4.0
109
+ ydata-profiling==4.18.0
110
+ zipp==3.23.0
src/streamlit_app.py DELETED
@@ -1,40 +0,0 @@
1
- import altair as alt
2
- import numpy as np
3
- import pandas as pd
4
- import streamlit as st
5
-
6
- """
7
- # Welcome to Streamlit!
8
-
9
- Edit `/streamlit_app.py` to customize this app to your heart's desire :heart:.
10
- If you have any questions, checkout our [documentation](https://docs.streamlit.io) and [community
11
- forums](https://discuss.streamlit.io).
12
-
13
- In the meantime, below is an example of what you can do with just a few lines of code:
14
- """
15
-
16
- num_points = st.slider("Number of points in spiral", 1, 10000, 1100)
17
- num_turns = st.slider("Number of turns in spiral", 1, 300, 31)
18
-
19
- indices = np.linspace(0, 1, num_points)
20
- theta = 2 * np.pi * num_turns * indices
21
- radius = indices
22
-
23
- x = radius * np.cos(theta)
24
- y = radius * np.sin(theta)
25
-
26
- df = pd.DataFrame({
27
- "x": x,
28
- "y": y,
29
- "idx": indices,
30
- "rand": np.random.randn(num_points),
31
- })
32
-
33
- st.altair_chart(alt.Chart(df, height=700, width=700)
34
- .mark_point(filled=True)
35
- .encode(
36
- x=alt.X("x", axis=None),
37
- y=alt.Y("y", axis=None),
38
- color=alt.Color("idx", legend=None, scale=alt.Scale()),
39
- size=alt.Size("rand", legend=None, scale=alt.Scale(range=[1, 150])),
40
- ))