diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..ac17a2494a4c2c70767b50d01336c40b92f5ba44 --- /dev/null +++ b/.gitignore @@ -0,0 +1,27 @@ +target +local +__pycache__ +.pytest_cache +/*.egg-info +/**/dist +/**/*.egg-info +/**/*-stubs +.venv + +# Eclipse, Netbeans and IntelliJ files +/.* +!/.github +!/.ci +!.gitignore +!.gitattributes +!/.mvn +/nbproject +*.ipr +*.iws +*.iml + +# Repository wide ignore mac DS_Store files +.DS_Store +*.code-workspace +CLAUDE.md +DOCUMENTATION_AUDIT.md diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..c5369cea95331a0f2c43393789128ddcbdcf1920 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,24 @@ +# Use Python 3.12 base image +FROM python:3.12 + +# Install JDK 21 (required for solverforge-legacy) +RUN apt-get update && \ + apt-get install -y wget gnupg2 && \ + wget -O- https://packages.adoptium.net/artifactory/api/gpg/key/public | gpg --dearmor > /usr/share/keyrings/adoptium-archive-keyring.gpg && \ + echo "deb [signed-by=/usr/share/keyrings/adoptium-archive-keyring.gpg] https://packages.adoptium.net/artifactory/deb bookworm main" > /etc/apt/sources.list.d/adoptium.list && \ + apt-get update && \ + apt-get install -y temurin-21-jdk && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* + +# Copy application files +COPY . . + +# Install the application +RUN pip install --no-cache-dir -e . 
+ +# Expose port 8080 +EXPOSE 8080 + +# Run the application +CMD ["run-app"] diff --git a/README.md b/README.md index c04f687e553e25ae06e94a99b46a4fd220917559..4dff5f92b2c13d668e824abbf75d3e9159d09367 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,79 @@ --- -title: Employee Scheduling Python +title: Employee Scheduling (Python) emoji: 👀 colorFrom: gray colorTo: green sdk: docker +app_port: 8080 pinned: false license: apache-2.0 short_description: SolverForge Quickstart for the Employee Scheduling problem --- -Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference +# Employee Scheduling (Python) + +Schedule shifts to employees, accounting for employee availability and shift skill requirements. + +- [Prerequisites](#prerequisites) +- [Run the application](#run-the-application) +- [Test the application](#test-the-application) + +## Prerequisites + +1. Install [Python 3.11 or 3.12](https://www.python.org/downloads/). + +2. Install JDK 21+, for example with [Sdkman](https://sdkman.io): + + ```sh + $ sdk install java + ``` + +## Run the application + +1. Git clone the solverforge-quickstarts repo and navigate to this directory: + + ```sh + $ git clone https://github.com/SolverForge/solverforge-quickstarts.git + ... + $ cd solverforge-quickstarts/employee-scheduling-fast + ``` + +2. Create a virtual environment: + + ```sh + $ python -m venv .venv + ``` + +3. Activate the virtual environment: + + ```sh + $ . .venv/bin/activate + ``` + +4. Install the application: + + ```sh + $ pip install -e . + ``` + +5. Run the application: + + ```sh + $ run-app + ``` + +6. Visit [http://localhost:8080](http://localhost:8080) in your browser. + +7. Click on the **Solve** button. + +## Test the application + +1. Run tests: + + ```sh + $ pytest + ``` + +## More information + +Visit [solverforge.org](https://www.solverforge.org). 
diff --git a/deploy/employee-scheduling/Chart.yaml b/deploy/employee-scheduling/Chart.yaml new file mode 100644 index 0000000000000000000000000000000000000000..1d0f3318576c1f9699f209887e8fc0765137daae --- /dev/null +++ b/deploy/employee-scheduling/Chart.yaml @@ -0,0 +1,6 @@ +apiVersion: v2 +name: employee-scheduling +description: A Helm chart for Employee Scheduling application +type: application +version: 1.0.1 +appVersion: "1.0.1" diff --git a/deploy/employee-scheduling/templates/_helpers.tpl b/deploy/employee-scheduling/templates/_helpers.tpl new file mode 100644 index 0000000000000000000000000000000000000000..88f76509b4fd138a67697463ff7639d11a92c4e8 --- /dev/null +++ b/deploy/employee-scheduling/templates/_helpers.tpl @@ -0,0 +1,49 @@ +{{/* +Expand the name of the chart. +*/}} +{{- define "employee-scheduling.name" -}} +{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Create a default fully qualified app name. +*/}} +{{- define "employee-scheduling.fullname" -}} +{{- if .Values.fullnameOverride }} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- $name := default .Chart.Name .Values.nameOverride }} +{{- if contains $name .Release.Name }} +{{- .Release.Name | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} +{{- end }} +{{- end }} +{{- end }} + +{{/* +Create chart name and version as used by the chart label. +*/}} +{{- define "employee-scheduling.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Common labels +*/}} +{{- define "employee-scheduling.labels" -}} +helm.sh/chart: {{ include "employee-scheduling.chart" . }} +{{ include "employee-scheduling.selectorLabels" . 
}} +{{- if .Chart.AppVersion }} +app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} +{{- end }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end }} + +{{/* +Selector labels +*/}} +{{- define "employee-scheduling.selectorLabels" -}} +app.kubernetes.io/name: {{ include "employee-scheduling.name" . }} +app.kubernetes.io/instance: {{ .Release.Name }} +{{- end }} diff --git a/deploy/employee-scheduling/templates/deployment.yaml b/deploy/employee-scheduling/templates/deployment.yaml new file mode 100644 index 0000000000000000000000000000000000000000..f3286ee7b7289a44efe001239267fbd71b858f84 --- /dev/null +++ b/deploy/employee-scheduling/templates/deployment.yaml @@ -0,0 +1,70 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ include "employee-scheduling.fullname" . }} + labels: + {{- include "employee-scheduling.labels" . | nindent 4 }} +spec: + replicas: {{ .Values.replicaCount }} + selector: + matchLabels: + {{- include "employee-scheduling.selectorLabels" . | nindent 6 }} + template: + metadata: + labels: + {{- include "employee-scheduling.selectorLabels" . | nindent 8 }} + {{- with .Values.podAnnotations }} + annotations: + {{- toYaml . | nindent 8 }} + {{- end }} + spec: + {{- if .Values.imagePullSecrets }} + imagePullSecrets: + {{- toYaml .Values.imagePullSecrets | nindent 8 }} + {{- end }} + {{- with .Values.podSecurityContext }} + securityContext: + {{- toYaml . | nindent 8 }} + {{- end }} + containers: + - name: {{ include "employee-scheduling.name" . }} + image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" + imagePullPolicy: {{ .Values.image.pullPolicy }} + ports: + - name: http + containerPort: {{ .Values.service.port }} + protocol: TCP + {{- with .Values.securityContext }} + securityContext: + {{- toYaml . 
| nindent 12 }} + {{- end }} + {{- if .Values.resources }} + resources: + {{- toYaml .Values.resources | nindent 12 }} + {{- end }} + livenessProbe: + httpGet: + path: / + port: http + initialDelaySeconds: 10 + periodSeconds: 10 + failureThreshold: 3 + readinessProbe: + httpGet: + path: / + port: http + initialDelaySeconds: 5 + periodSeconds: 5 + failureThreshold: 3 + {{- if .Values.nodeSelector }} + nodeSelector: + {{- toYaml .Values.nodeSelector | nindent 8 }} + {{- end }} + {{- if .Values.tolerations }} + tolerations: + {{- toYaml .Values.tolerations | nindent 8 }} + {{- end }} + {{- if .Values.affinity }} + affinity: + {{- toYaml .Values.affinity | nindent 8 }} + {{- end }} diff --git a/deploy/employee-scheduling/templates/service.yaml b/deploy/employee-scheduling/templates/service.yaml new file mode 100644 index 0000000000000000000000000000000000000000..069e47170d8c70ef0db84ccf83b1e5ac2d8d3fe6 --- /dev/null +++ b/deploy/employee-scheduling/templates/service.yaml @@ -0,0 +1,18 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{ include "employee-scheduling.fullname" . }} + labels: + {{- include "employee-scheduling.labels" . | nindent 4 }} +spec: + type: {{ .Values.service.type }} + ports: + - port: {{ .Values.service.port }} + targetPort: http + protocol: TCP + name: http + {{- if .Values.service.nodePort }} + nodePort: {{ .Values.service.nodePort }} + {{- end }} + selector: + {{- include "employee-scheduling.selectorLabels" . 
| nindent 4 }} diff --git a/deploy/employee-scheduling/values.yaml b/deploy/employee-scheduling/values.yaml new file mode 100644 index 0000000000000000000000000000000000000000..70fc33ed6c80714a9fce936b9b04af6f2dedfb85 --- /dev/null +++ b/deploy/employee-scheduling/values.yaml @@ -0,0 +1,35 @@ +replicaCount: 1 + +image: + repository: employee-scheduling + pullPolicy: IfNotPresent + tag: "1.0.1" + +imagePullSecrets: [] +nameOverride: "" +fullnameOverride: "" + +podAnnotations: {} + +podSecurityContext: {} + +securityContext: {} + +service: + type: NodePort + port: 8080 + nodePort: 30081 + +resources: {} + +autoscaling: + enabled: false + minReplicas: 1 + maxReplicas: 100 + targetCPUUtilizationPercentage: 80 + +nodeSelector: {} + +tolerations: [] + +affinity: {} diff --git a/logging.conf b/logging.conf new file mode 100644 index 0000000000000000000000000000000000000000..b9dd947471674104ce71007ff163ba7dcd524084 --- /dev/null +++ b/logging.conf @@ -0,0 +1,30 @@ +[loggers] +keys=root,timefold_solver + +[handlers] +keys=consoleHandler + +[formatters] +keys=simpleFormatter + +[logger_root] +level=INFO +handlers=consoleHandler + +[logger_timefold_solver] +level=INFO +qualname=timefold.solver +handlers=consoleHandler +propagate=0 + +[handler_consoleHandler] +class=StreamHandler +level=INFO +formatter=simpleFormatter +args=(sys.stdout,) + +[formatter_simpleFormatter] +class=uvicorn.logging.ColourizedFormatter +format={levelprefix:<8} @ {name} : {message} +style={ +use_colors=True diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000000000000000000000000000000000000..11a5a8072554891cd1c8729c1d5a2e8db8585c6d --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,20 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + + +[project] +name = "employee_scheduling" +version = "1.0.1" +requires-python = ">=3.10" +dependencies = [ + 'solverforge-legacy == 1.24.1', + 'fastapi == 0.111.0', + 'pydantic == 2.7.3', + 'uvicorn == 0.30.1', + 
'pytest == 8.2.2', +] + + +[project.scripts] +run-app = "employee_scheduling:main" diff --git a/src/employee_scheduling/__init__.py b/src/employee_scheduling/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..29391a89e0c2e374193d0f66c731abe6be7073b3 --- /dev/null +++ b/src/employee_scheduling/__init__.py @@ -0,0 +1,19 @@ +import uvicorn + +from .rest_api import app + + +def main(): + config = uvicorn.Config( + "employee_scheduling:app", + host="0.0.0.0", + port=8080, + log_config="logging.conf", + use_colors=True, + ) + server = uvicorn.Server(config) + server.run() + + +if __name__ == "__main__": + main() diff --git a/src/employee_scheduling/__pycache__/__init__.cpython-310.pyc b/src/employee_scheduling/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9fd2e9d312717c2733435c6d6214a7b203d89a70 Binary files /dev/null and b/src/employee_scheduling/__pycache__/__init__.cpython-310.pyc differ diff --git a/src/employee_scheduling/__pycache__/__init__.cpython-312.pyc b/src/employee_scheduling/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5e289a1aa626d22a797a00ef09e8b05c4fc58491 Binary files /dev/null and b/src/employee_scheduling/__pycache__/__init__.cpython-312.pyc differ diff --git a/src/employee_scheduling/__pycache__/__init__.cpython-313.pyc b/src/employee_scheduling/__pycache__/__init__.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..07c48ab8b98b78ccff9abdc84f86ce744e951de7 Binary files /dev/null and b/src/employee_scheduling/__pycache__/__init__.cpython-313.pyc differ diff --git a/src/employee_scheduling/__pycache__/constraints.cpython-310.pyc b/src/employee_scheduling/__pycache__/constraints.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d19535e63edb67df54d42b0557cdb9c00c57e9ca Binary files /dev/null and 
b/src/employee_scheduling/__pycache__/constraints.cpython-310.pyc differ diff --git a/src/employee_scheduling/__pycache__/constraints.cpython-312.pyc b/src/employee_scheduling/__pycache__/constraints.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7df752bbf52ab76b659defc997cd7c58ae0d0e84 Binary files /dev/null and b/src/employee_scheduling/__pycache__/constraints.cpython-312.pyc differ diff --git a/src/employee_scheduling/__pycache__/constraints.cpython-313.pyc b/src/employee_scheduling/__pycache__/constraints.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c2504a28aaa8710b35c37596da9799d53a257271 Binary files /dev/null and b/src/employee_scheduling/__pycache__/constraints.cpython-313.pyc differ diff --git a/src/employee_scheduling/__pycache__/converters.cpython-310.pyc b/src/employee_scheduling/__pycache__/converters.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ae13859cdd80274ba5e5e46aec4bf09a0de92d82 Binary files /dev/null and b/src/employee_scheduling/__pycache__/converters.cpython-310.pyc differ diff --git a/src/employee_scheduling/__pycache__/converters.cpython-312.pyc b/src/employee_scheduling/__pycache__/converters.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5e3fe5803dce3ac913e3786cf036c9a413b0606f Binary files /dev/null and b/src/employee_scheduling/__pycache__/converters.cpython-312.pyc differ diff --git a/src/employee_scheduling/__pycache__/demo_data.cpython-310.pyc b/src/employee_scheduling/__pycache__/demo_data.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5ba02724d015a0732bc25e7ec3ccd1c50be5c675 Binary files /dev/null and b/src/employee_scheduling/__pycache__/demo_data.cpython-310.pyc differ diff --git a/src/employee_scheduling/__pycache__/demo_data.cpython-312.pyc b/src/employee_scheduling/__pycache__/demo_data.cpython-312.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..c7ef3541cc6da64770866133256f183be08fc4e1 Binary files /dev/null and b/src/employee_scheduling/__pycache__/demo_data.cpython-312.pyc differ diff --git a/src/employee_scheduling/__pycache__/domain.cpython-310.pyc b/src/employee_scheduling/__pycache__/domain.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c1038b834e25babad1a11e921d24b7b7c5920cff Binary files /dev/null and b/src/employee_scheduling/__pycache__/domain.cpython-310.pyc differ diff --git a/src/employee_scheduling/__pycache__/domain.cpython-312.pyc b/src/employee_scheduling/__pycache__/domain.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0b4977ea2d5e2fee63abe59dee0f2d24f1df96f8 Binary files /dev/null and b/src/employee_scheduling/__pycache__/domain.cpython-312.pyc differ diff --git a/src/employee_scheduling/__pycache__/domain.cpython-313.pyc b/src/employee_scheduling/__pycache__/domain.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..67884a8e760cc6afe1193747351ec53fb3c0368e Binary files /dev/null and b/src/employee_scheduling/__pycache__/domain.cpython-313.pyc differ diff --git a/src/employee_scheduling/__pycache__/json_serialization.cpython-310.pyc b/src/employee_scheduling/__pycache__/json_serialization.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a99d51c4c910f5d7c9f3318a628d966b425dd28e Binary files /dev/null and b/src/employee_scheduling/__pycache__/json_serialization.cpython-310.pyc differ diff --git a/src/employee_scheduling/__pycache__/json_serialization.cpython-312.pyc b/src/employee_scheduling/__pycache__/json_serialization.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cff277a51b8e047bfee5335977b51e5d8ccceafc Binary files /dev/null and b/src/employee_scheduling/__pycache__/json_serialization.cpython-312.pyc differ diff --git 
a/src/employee_scheduling/__pycache__/rest_api.cpython-310.pyc b/src/employee_scheduling/__pycache__/rest_api.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9315d8e2fc8ebf3b64422ae267103cdf742c7eea Binary files /dev/null and b/src/employee_scheduling/__pycache__/rest_api.cpython-310.pyc differ diff --git a/src/employee_scheduling/__pycache__/rest_api.cpython-312.pyc b/src/employee_scheduling/__pycache__/rest_api.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2a96954b49aeb54200c1235d0b86a6c56ceaa6b5 Binary files /dev/null and b/src/employee_scheduling/__pycache__/rest_api.cpython-312.pyc differ diff --git a/src/employee_scheduling/__pycache__/solver.cpython-310.pyc b/src/employee_scheduling/__pycache__/solver.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c7ec039e2819074dc49ab9e52a8e36f66ae47581 Binary files /dev/null and b/src/employee_scheduling/__pycache__/solver.cpython-310.pyc differ diff --git a/src/employee_scheduling/__pycache__/solver.cpython-312.pyc b/src/employee_scheduling/__pycache__/solver.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ab83550ebc52c501c06208b34ae28e146460b6e4 Binary files /dev/null and b/src/employee_scheduling/__pycache__/solver.cpython-312.pyc differ diff --git a/src/employee_scheduling/constraints.py b/src/employee_scheduling/constraints.py new file mode 100644 index 0000000000000000000000000000000000000000..73079950b23f01dfa14520027cd90d339c57d12c --- /dev/null +++ b/src/employee_scheduling/constraints.py @@ -0,0 +1,213 @@ +from solverforge_legacy.solver.score import ( + constraint_provider, + ConstraintFactory, + Joiners, + HardSoftDecimalScore, + ConstraintCollectors, +) +from datetime import datetime, date + +from .domain import Employee, Shift + + +def get_minute_overlap(shift1: Shift, shift2: Shift) -> int: + return ( + min(shift1.end, shift2.end) - max(shift1.start, 
shift2.start) + ).total_seconds() // 60 + + +def is_overlapping_with_date(shift: Shift, dt: date) -> bool: + return shift.start.date() == dt or shift.end.date() == dt + + +def overlapping_in_minutes( + first_start_datetime: datetime, + first_end_datetime: datetime, + second_start_datetime: datetime, + second_end_datetime: datetime, +) -> int: + latest_start = max(first_start_datetime, second_start_datetime) + earliest_end = min(first_end_datetime, second_end_datetime) + delta = (earliest_end - latest_start).total_seconds() / 60 + return max(0, delta) + + +def get_shift_overlapping_duration_in_minutes(shift: Shift, dt: date) -> int: + overlap = 0 + start_date_time = datetime.combine(dt, datetime.min.time()) + end_date_time = datetime.combine(dt, datetime.max.time()) + overlap += overlapping_in_minutes( + start_date_time, end_date_time, shift.start, shift.end + ) + return overlap + + +@constraint_provider +def define_constraints(constraint_factory: ConstraintFactory): + return [ + # Hard constraints + required_skill(constraint_factory), + no_overlapping_shifts(constraint_factory), + at_least_10_hours_between_two_shifts(constraint_factory), + one_shift_per_day(constraint_factory), + unavailable_employee(constraint_factory), + max_shifts_per_employee(constraint_factory), + # Soft constraints + undesired_day_for_employee(constraint_factory), + desired_day_for_employee(constraint_factory), + balance_employee_shift_assignments(constraint_factory), + ] + + +def required_skill(constraint_factory: ConstraintFactory): + return ( + constraint_factory.for_each(Shift) + .filter(lambda shift: not shift.has_required_skill()) + .penalize(HardSoftDecimalScore.ONE_HARD) + .as_constraint("Missing required skill") + ) + + +def no_overlapping_shifts(constraint_factory: ConstraintFactory): + return ( + constraint_factory.for_each_unique_pair( + Shift, + Joiners.equal(lambda shift: shift.employee.name), + Joiners.overlapping(lambda shift: shift.start, lambda shift: shift.end), + ) + 
.penalize(HardSoftDecimalScore.ONE_HARD, get_minute_overlap) + .as_constraint("Overlapping shift") + ) + + +def at_least_10_hours_between_two_shifts(constraint_factory: ConstraintFactory): + return ( + constraint_factory.for_each(Shift) + .join( + Shift, + Joiners.equal(lambda shift: shift.employee.name), + Joiners.less_than_or_equal( + lambda shift: shift.end, lambda shift: shift.start + ), + ) + .filter( + lambda first_shift, second_shift: ( + second_shift.start - first_shift.end + ).total_seconds() + // (60 * 60) + < 10 + ) + .penalize( + HardSoftDecimalScore.ONE_HARD, + lambda first_shift, second_shift: 600 + - ((second_shift.start - first_shift.end).total_seconds() // 60), + ) + .as_constraint("At least 10 hours between 2 shifts") + ) + + +def one_shift_per_day(constraint_factory: ConstraintFactory): + return ( + constraint_factory.for_each_unique_pair( + Shift, + Joiners.equal(lambda shift: shift.employee.name), + Joiners.equal(lambda shift: shift.start.date()), + ) + .penalize(HardSoftDecimalScore.ONE_HARD) + .as_constraint("Max one shift per day") + ) + + +def unavailable_employee(constraint_factory: ConstraintFactory): + return ( + constraint_factory.for_each(Shift) + .join( + Employee, + Joiners.equal(lambda shift: shift.employee, lambda employee: employee), + ) + .flatten_last(lambda employee: employee.unavailable_dates) + .filter(lambda shift, unavailable_date: shift.is_overlapping_with_date(unavailable_date)) + .penalize( + HardSoftDecimalScore.ONE_HARD, + lambda shift, unavailable_date: shift.get_overlapping_duration_in_minutes(unavailable_date), + ) + .as_constraint("Unavailable employee") + ) + + +def max_shifts_per_employee(constraint_factory: ConstraintFactory): + """ + Hard constraint: No employee can have more than 12 shifts. 
+ + The limit of 12 is chosen based on the demo data dimensions: + - SMALL dataset: 139 shifts / 15 employees = ~9.3 average + - This provides headroom while preventing extreme imbalance + + Note: A limit that's too low (e.g., 5) would make the problem infeasible. + Always ensure your constraints are compatible with your data dimensions. + """ + return ( + constraint_factory.for_each(Shift) + .group_by(lambda shift: shift.employee, ConstraintCollectors.count()) + .filter(lambda employee, shift_count: shift_count > 12) + .penalize( + HardSoftDecimalScore.ONE_HARD, + lambda employee, shift_count: shift_count - 12, + ) + .as_constraint("Max 12 shifts per employee") + ) + + +def undesired_day_for_employee(constraint_factory: ConstraintFactory): + return ( + constraint_factory.for_each(Shift) + .join( + Employee, + Joiners.equal(lambda shift: shift.employee, lambda employee: employee), + ) + .flatten_last(lambda employee: employee.undesired_dates) + .filter(lambda shift, undesired_date: shift.is_overlapping_with_date(undesired_date)) + .penalize( + HardSoftDecimalScore.ONE_SOFT, + lambda shift, undesired_date: shift.get_overlapping_duration_in_minutes(undesired_date), + ) + .as_constraint("Undesired day for employee") + ) + + +def desired_day_for_employee(constraint_factory: ConstraintFactory): + return ( + constraint_factory.for_each(Shift) + .join( + Employee, + Joiners.equal(lambda shift: shift.employee, lambda employee: employee), + ) + .flatten_last(lambda employee: employee.desired_dates) + .filter(lambda shift, desired_date: shift.is_overlapping_with_date(desired_date)) + .reward( + HardSoftDecimalScore.ONE_SOFT, + lambda shift, desired_date: shift.get_overlapping_duration_in_minutes(desired_date), + ) + .as_constraint("Desired day for employee") + ) + + +def balance_employee_shift_assignments(constraint_factory: ConstraintFactory): + return ( + constraint_factory.for_each(Shift) + .group_by(lambda shift: shift.employee, ConstraintCollectors.count()) + 
.complement( + Employee, lambda e: 0 + ) # Include all employees which are not assigned to any shift. + .group_by( + ConstraintCollectors.load_balance( + lambda employee, shift_count: employee, + lambda employee, shift_count: shift_count, + ) + ) + .penalize_decimal( + HardSoftDecimalScore.ONE_SOFT, + lambda load_balance: load_balance.unfairness(), + ) + .as_constraint("Balance employee shift assignments") + ) diff --git a/src/employee_scheduling/converters.py b/src/employee_scheduling/converters.py new file mode 100644 index 0000000000000000000000000000000000000000..5af394f09978c59041a7b39a182fe818e3f08993 --- /dev/null +++ b/src/employee_scheduling/converters.py @@ -0,0 +1,95 @@ +from typing import List, Optional, Union +from datetime import datetime, date +from . import domain +from .json_serialization import JsonDomainBase +from pydantic import Field + + +# Conversion functions from domain to API models +def employee_to_model(employee: domain.Employee) -> domain.EmployeeModel: + return domain.EmployeeModel( + name=employee.name, + skills=list(employee.skills), + unavailable_dates=[d.isoformat() for d in employee.unavailable_dates], + undesired_dates=[d.isoformat() for d in employee.undesired_dates], + desired_dates=[d.isoformat() for d in employee.desired_dates], + ) + + +def shift_to_model(shift: domain.Shift) -> domain.ShiftModel: + return domain.ShiftModel( + id=shift.id, + start=shift.start.isoformat(), + end=shift.end.isoformat(), + location=shift.location, + required_skill=shift.required_skill, + employee=employee_to_model(shift.employee) if shift.employee else None, + ) + + +def schedule_to_model( + schedule: domain.EmployeeSchedule, +) -> domain.EmployeeScheduleModel: + return domain.EmployeeScheduleModel( + employees=[employee_to_model(e) for e in schedule.employees], + shifts=[shift_to_model(s) for s in schedule.shifts], + score=str(schedule.score) if schedule.score else None, + solver_status=schedule.solver_status.name if schedule.solver_status else 
None, + ) + + +# Conversion functions from API models to domain +def model_to_employee(model: domain.EmployeeModel) -> domain.Employee: + return domain.Employee( + name=model.name, + skills=set(model.skills), + unavailable_dates={date.fromisoformat(d) for d in model.unavailable_dates}, + undesired_dates={date.fromisoformat(d) for d in model.undesired_dates}, + desired_dates={date.fromisoformat(d) for d in model.desired_dates}, + ) + + +def model_to_shift(model: domain.ShiftModel, employee_lookup: dict) -> domain.Shift: + # Handle employee reference + employee = None + if model.employee: + if isinstance(model.employee, str): + employee = employee_lookup[model.employee] + else: + employee = model_to_employee(model.employee) + + return domain.Shift( + id=model.id, + start=datetime.fromisoformat(model.start), + end=datetime.fromisoformat(model.end), + location=model.location, + required_skill=model.required_skill, + employee=employee, + ) + + +def model_to_schedule(model: domain.EmployeeScheduleModel) -> domain.EmployeeSchedule: + # Convert employees first + employees = [model_to_employee(e) for e in model.employees] + + # Create lookup dictionary for employee references + employee_lookup = {e.name: e for e in employees} + + # Convert shifts with employee lookups + shifts = [model_to_shift(s, employee_lookup) for s in model.shifts] + + # Handle score + score = None + if model.score: + from solverforge_legacy.solver.score import HardSoftDecimalScore + + score = HardSoftDecimalScore.parse(model.score) + + # Handle solver status + solver_status = domain.SolverStatus.NOT_SOLVING + if model.solver_status: + solver_status = domain.SolverStatus[model.solver_status] + + return domain.EmployeeSchedule( + employees=employees, shifts=shifts, score=score, solver_status=solver_status + ) diff --git a/src/employee_scheduling/demo_data.py b/src/employee_scheduling/demo_data.py new file mode 100644 index 
0000000000000000000000000000000000000000..4c8bc1f7f26155cf8505a4a693e846c94a2a6ab5 --- /dev/null +++ b/src/employee_scheduling/demo_data.py @@ -0,0 +1,228 @@ +from datetime import date, datetime, time, timedelta +from itertools import product +from enum import Enum +from random import Random +from typing import Generator +from dataclasses import dataclass, field + +from .domain import * + + +class DemoData(Enum): + SMALL = 'SMALL' + LARGE = 'LARGE' + + +@dataclass(frozen=True, kw_only=True) +class CountDistribution: + count: int + weight: float + + +def counts(distributions: tuple[CountDistribution, ...]) -> tuple[int, ...]: + return tuple(distribution.count for distribution in distributions) + + +def weights(distributions: tuple[CountDistribution, ...]) -> tuple[float, ...]: + return tuple(distribution.weight for distribution in distributions) + + +@dataclass(kw_only=True) +class DemoDataParameters: + locations: tuple[str, ...] + required_skills: tuple[str, ...] + optional_skills: tuple[str, ...] + days_in_schedule: int + employee_count: int + optional_skill_distribution: tuple[CountDistribution, ...] + shift_count_distribution: tuple[CountDistribution, ...] + availability_count_distribution: tuple[CountDistribution, ...] 
+ random_seed: int = field(default=37) + + +demo_data_to_parameters: dict[DemoData, DemoDataParameters] = { + DemoData.SMALL: DemoDataParameters( + locations=("Ambulatory care", "Critical care", "Pediatric care"), + required_skills=("Doctor", "Nurse"), + optional_skills=("Anaesthetics", "Cardiology"), + days_in_schedule=14, + employee_count=15, + optional_skill_distribution=( + CountDistribution(count=1, weight=3), + CountDistribution(count=2, weight=1) + ), + shift_count_distribution=( + CountDistribution(count=1, weight=0.9), + CountDistribution(count=2, weight=0.1) + ), + availability_count_distribution=( + CountDistribution(count=1, weight=4), + CountDistribution(count=2, weight=3), + CountDistribution(count=3, weight=2), + CountDistribution(count=4, weight=1) + ), + random_seed=37 + ), + + DemoData.LARGE: DemoDataParameters( + locations=("Ambulatory care", + "Neurology", + "Critical care", + "Pediatric care", + "Surgery", + "Radiology", + "Outpatient"), + required_skills=("Doctor", "Nurse"), + optional_skills=("Anaesthetics", "Cardiology", "Radiology"), + days_in_schedule=28, + employee_count=50, + optional_skill_distribution=( + CountDistribution(count=1, weight=3), + CountDistribution(count=2, weight=1) + ), + shift_count_distribution=( + CountDistribution(count=1, weight=0.5), + CountDistribution(count=2, weight=0.3), + CountDistribution(count=3, weight=0.2) + ), + availability_count_distribution=( + CountDistribution(count=5, weight=4), + CountDistribution(count=10, weight=3), + CountDistribution(count=15, weight=2), + CountDistribution(count=20, weight=1) + ), + random_seed=37 + ) +} + + +FIRST_NAMES = ("Amy", "Beth", "Carl", "Dan", "Elsa", "Flo", "Gus", "Hugo", "Ivy", "Jay") +LAST_NAMES = ("Cole", "Fox", "Green", "Jones", "King", "Li", "Poe", "Rye", "Smith", "Watt") +SHIFT_LENGTH = timedelta(hours=8) +MORNING_SHIFT_START_TIME = time(hour=6, minute=0) +DAY_SHIFT_START_TIME = time(hour=9, minute=0) +AFTERNOON_SHIFT_START_TIME = time(hour=14, minute=0) 
+NIGHT_SHIFT_START_TIME = time(hour=22, minute=0) + +SHIFT_START_TIMES_COMBOS = ( + (MORNING_SHIFT_START_TIME, AFTERNOON_SHIFT_START_TIME), + (MORNING_SHIFT_START_TIME, AFTERNOON_SHIFT_START_TIME, NIGHT_SHIFT_START_TIME), + (MORNING_SHIFT_START_TIME, DAY_SHIFT_START_TIME, AFTERNOON_SHIFT_START_TIME, NIGHT_SHIFT_START_TIME), +) + + +location_to_shift_start_time_list_map = dict() + + +def earliest_monday_on_or_after(target_date: date): + """ + Returns the date of the next given weekday after + the given date. For example, the date of next Monday. + + NB: if it IS the day we're looking for, this returns 0. + consider then doing onDay(foo, day + 1). + """ + days = (7 - target_date.weekday()) % 7 + return target_date + timedelta(days=days) + + +def generate_demo_data(demo_data_or_parameters: DemoData | DemoDataParameters) -> EmployeeSchedule: + global location_to_shift_start_time_list_map, demo_data_to_parameters + if isinstance(demo_data_or_parameters, DemoData): + parameters = demo_data_to_parameters[demo_data_or_parameters] + else: + parameters = demo_data_or_parameters + + start_date = earliest_monday_on_or_after(date.today()) + random = Random(parameters.random_seed) + shift_template_index = 0 + for location in parameters.locations: + location_to_shift_start_time_list_map[location] = SHIFT_START_TIMES_COMBOS[shift_template_index] + shift_template_index = (shift_template_index + 1) % len(SHIFT_START_TIMES_COMBOS) + + name_permutations = [f'{first_name} {last_name}' + for first_name, last_name in product(FIRST_NAMES, LAST_NAMES)] + random.shuffle(name_permutations) + + employees = [] + for i in range(parameters.employee_count): + count, = random.choices(population=counts(parameters.optional_skill_distribution), + weights=weights(parameters.optional_skill_distribution)) + skills = [] + skills += random.sample(parameters.optional_skills, count) + skills += random.sample(parameters.required_skills, 1) + employees.append( + Employee(name=name_permutations[i], + 
skills=set(skills)) + ) + + shifts: list[Shift] = [] + + def id_generator(): + current_id = 0 + while True: + yield str(current_id) + current_id += 1 + + ids = id_generator() + + for i in range(parameters.days_in_schedule): + count, = random.choices(population=counts(parameters.availability_count_distribution), + weights=weights(parameters.availability_count_distribution)) + employees_with_availabilities_on_day = random.sample(employees, count) + current_date = start_date + timedelta(days=i) + for employee in employees_with_availabilities_on_day: + rand_num = random.randint(0, 2) + if rand_num == 0: + employee.unavailable_dates.add(current_date) + elif rand_num == 1: + employee.undesired_dates.add(current_date) + elif rand_num == 2: + employee.desired_dates.add(current_date) + shifts += generate_shifts_for_day(parameters, current_date, random, ids) + + shift_count = 0 + for shift in shifts: + shift.id = str(shift_count) + shift_count += 1 + + return EmployeeSchedule( + employees=employees, + shifts=shifts + ) + + +def generate_shifts_for_day(parameters: DemoDataParameters, current_date: date, random: Random, + ids: Generator[str, any, any]) -> list[Shift]: + global location_to_shift_start_time_list_map + shifts = [] + for location in parameters.locations: + shift_start_times = location_to_shift_start_time_list_map[location] + for start_time in shift_start_times: + shift_start_date_time = datetime.combine(current_date, start_time) + shift_end_date_time = shift_start_date_time + SHIFT_LENGTH + shifts += generate_shifts_for_timeslot(parameters, shift_start_date_time, shift_end_date_time, + location, random, ids) + + return shifts + + +def generate_shifts_for_timeslot(parameters: DemoDataParameters, timeslot_start: datetime, timeslot_end: datetime, + location: str, random: Random, ids: Generator[str, any, any]) -> list[Shift]: + shift_count, = random.choices(population=counts(parameters.shift_count_distribution), + weights=weights(parameters.shift_count_distribution)) 
+ + shifts = [] + for i in range(shift_count): + if random.random() >= 0.5: + required_skill = random.choice(parameters.required_skills) + else: + required_skill = random.choice(parameters.optional_skills) + shifts.append(Shift( + id=next(ids), + start=timeslot_start, + end=timeslot_end, + location=location, + required_skill=required_skill)) + + return shifts diff --git a/src/employee_scheduling/domain.py b/src/employee_scheduling/domain.py new file mode 100644 index 0000000000000000000000000000000000000000..4622ab8c9b9c3d4cf1060033b5c0b6112854213e --- /dev/null +++ b/src/employee_scheduling/domain.py @@ -0,0 +1,95 @@ +from solverforge_legacy.solver import SolverStatus +from solverforge_legacy.solver.domain import ( + planning_entity, + planning_solution, + PlanningId, + PlanningVariable, + PlanningEntityCollectionProperty, + ProblemFactCollectionProperty, + ValueRangeProvider, + PlanningScore, +) +from solverforge_legacy.solver.score import HardSoftDecimalScore +from datetime import datetime, date +from typing import Annotated, List, Optional, Union +from dataclasses import dataclass, field +from .json_serialization import JsonDomainBase +from pydantic import Field + + +@dataclass +class Employee: + name: Annotated[str, PlanningId] + skills: set[str] = field(default_factory=set) + unavailable_dates: set[date] = field(default_factory=set) + undesired_dates: set[date] = field(default_factory=set) + desired_dates: set[date] = field(default_factory=set) + + +@planning_entity +@dataclass +class Shift: + id: Annotated[str, PlanningId] + start: datetime + end: datetime + location: str + required_skill: str + employee: Annotated[Employee | None, PlanningVariable] = None + + def has_required_skill(self) -> bool: + """Check if assigned employee has the required skill.""" + if self.employee is None: + return False + return self.required_skill in self.employee.skills + + def is_overlapping_with_date(self, dt: date) -> bool: + """Check if shift overlaps with a specific 
date.""" + return self.start.date() == dt or self.end.date() == dt + + def get_overlapping_duration_in_minutes(self, dt: date) -> int: + """Calculate overlap duration in minutes for a specific date.""" + start_date_time = datetime.combine(dt, datetime.min.time()) + end_date_time = datetime.combine(dt, datetime.max.time()) + + # Calculate overlap between date range and shift range + max_start_time = max(start_date_time, self.start) + min_end_time = min(end_date_time, self.end) + + minutes = (min_end_time - max_start_time).total_seconds() / 60 + return int(max(0, minutes)) + + +@planning_solution +@dataclass +class EmployeeSchedule: + employees: Annotated[ + list[Employee], ProblemFactCollectionProperty, ValueRangeProvider + ] + shifts: Annotated[list[Shift], PlanningEntityCollectionProperty] + score: Annotated[HardSoftDecimalScore | None, PlanningScore] = None + solver_status: SolverStatus = SolverStatus.NOT_SOLVING + + +# Pydantic REST models for API (used for deserialization and context) +class EmployeeModel(JsonDomainBase): + name: str + skills: List[str] = Field(default_factory=list) + unavailable_dates: List[str] = Field(default_factory=list, alias="unavailableDates") + undesired_dates: List[str] = Field(default_factory=list, alias="undesiredDates") + desired_dates: List[str] = Field(default_factory=list, alias="desiredDates") + + +class ShiftModel(JsonDomainBase): + id: str + start: str # ISO datetime string + end: str # ISO datetime string + location: str + required_skill: str = Field(..., alias="requiredSkill") + employee: Union[str, EmployeeModel, None] = None + + +class EmployeeScheduleModel(JsonDomainBase): + employees: List[EmployeeModel] + shifts: List[ShiftModel] + score: Optional[str] = None + solver_status: Optional[str] = None diff --git a/src/employee_scheduling/json_serialization.py b/src/employee_scheduling/json_serialization.py new file mode 100644 index 0000000000000000000000000000000000000000..a919e96af191a3203c817c2a17ae15d5f2b24b46 --- 
/dev/null +++ b/src/employee_scheduling/json_serialization.py @@ -0,0 +1,27 @@ +from solverforge_legacy.solver.score import HardSoftDecimalScore +from typing import Any +from pydantic import BaseModel, ConfigDict, PlainSerializer, BeforeValidator +from pydantic.alias_generators import to_camel + +ScoreSerializer = PlainSerializer( + lambda score: str(score) if score is not None else None, return_type=str | None +) + + +def validate_score(v: Any) -> Any: + if isinstance(v, HardSoftDecimalScore) or v is None: + return v + if isinstance(v, str): + return HardSoftDecimalScore.parse(v) + raise ValueError('"score" should be a string') + + +ScoreValidator = BeforeValidator(validate_score) + + +class JsonDomainBase(BaseModel): + model_config = ConfigDict( + alias_generator=to_camel, + populate_by_name=True, + from_attributes=True, + ) diff --git a/src/employee_scheduling/rest_api.py b/src/employee_scheduling/rest_api.py new file mode 100644 index 0000000000000000000000000000000000000000..8ff49cd068e30d13caa347a90c0573c6277a22c6 --- /dev/null +++ b/src/employee_scheduling/rest_api.py @@ -0,0 +1,56 @@ +from fastapi import FastAPI +from fastapi.staticfiles import StaticFiles +from uuid import uuid4 +from dataclasses import replace + +from .domain import EmployeeSchedule, EmployeeScheduleModel +from .converters import ( + schedule_to_model, model_to_schedule +) +from .demo_data import DemoData, generate_demo_data +from .solver import solver_manager, solution_manager + +app = FastAPI(docs_url='/q/swagger-ui') +data_sets: dict[str, EmployeeSchedule] = {} + + +@app.get("/demo-data") +async def demo_data_list() -> list[DemoData]: + return [e for e in DemoData] + + +@app.get("/demo-data/{dataset_id}", response_model_exclude_none=True) +async def get_demo_data(dataset_id: str) -> EmployeeScheduleModel: + demo_data = getattr(DemoData, dataset_id) + domain_schedule = generate_demo_data(demo_data) + return schedule_to_model(domain_schedule) + + +@app.get("/schedules/{problem_id}", 
response_model_exclude_none=True) +async def get_timetable(problem_id: str) -> EmployeeScheduleModel: + schedule = data_sets[problem_id] + updated_schedule = replace(schedule, solver_status=solver_manager.get_solver_status(problem_id)) + return schedule_to_model(updated_schedule) + + +def update_schedule(problem_id: str, schedule: EmployeeSchedule): + global data_sets + data_sets[problem_id] = schedule + + +@app.post("/schedules") +async def solve_timetable(schedule_model: EmployeeScheduleModel) -> str: + job_id = str(uuid4()) + schedule = model_to_schedule(schedule_model) + data_sets[job_id] = schedule + solver_manager.solve_and_listen(job_id, schedule, + lambda solution: update_schedule(job_id, solution)) + return job_id + + +@app.delete("/schedules/{problem_id}") +async def stop_solving(problem_id: str) -> None: + solver_manager.terminate_early(problem_id) + + +app.mount("/", StaticFiles(directory="static", html=True), name="static") diff --git a/src/employee_scheduling/solver.py b/src/employee_scheduling/solver.py new file mode 100644 index 0000000000000000000000000000000000000000..54292a16ed1f310cd49c698cb46a3f98389d8225 --- /dev/null +++ b/src/employee_scheduling/solver.py @@ -0,0 +1,23 @@ +from solverforge_legacy.solver import SolverManager, SolverFactory, SolutionManager +from solverforge_legacy.solver.config import ( + SolverConfig, + ScoreDirectorFactoryConfig, + TerminationConfig, + Duration, +) + +from .domain import EmployeeSchedule, Shift +from .constraints import define_constraints + + +solver_config = SolverConfig( + solution_class=EmployeeSchedule, + entity_class_list=[Shift], + score_director_factory_config=ScoreDirectorFactoryConfig( + constraint_provider_function=define_constraints + ), + termination_config=TerminationConfig(spent_limit=Duration(seconds=30)), +) + +solver_manager = SolverManager.create(SolverFactory.create(solver_config)) +solution_manager = SolutionManager.create(solver_manager) diff --git a/static/app.js b/static/app.js new 
file mode 100644 index 0000000000000000000000000000000000000000..f246031bb4e5f8ca09c4b4c8221b1a4da817500a --- /dev/null +++ b/static/app.js @@ -0,0 +1,520 @@ +let autoRefreshIntervalId = null; +const zoomMin = 2 * 1000 * 60 * 60 * 24 // 2 day in milliseconds +const zoomMax = 4 * 7 * 1000 * 60 * 60 * 24 // 4 weeks in milliseconds + +const UNAVAILABLE_COLOR = '#ef2929' // Tango Scarlet Red +const UNDESIRED_COLOR = '#f57900' // Tango Orange +const DESIRED_COLOR = '#73d216' // Tango Chameleon + +let demoDataId = null; +let scheduleId = null; +let loadedSchedule = null; + +const byEmployeePanel = document.getElementById("byEmployeePanel"); +const byEmployeeTimelineOptions = { + timeAxis: {scale: "hour", step: 6}, + orientation: {axis: "top"}, + stack: false, + xss: {disabled: true}, // Items are XSS safe through JQuery + zoomMin: zoomMin, + zoomMax: zoomMax, +}; +let byEmployeeGroupDataSet = new vis.DataSet(); +let byEmployeeItemDataSet = new vis.DataSet(); +let byEmployeeTimeline = new vis.Timeline(byEmployeePanel, byEmployeeItemDataSet, byEmployeeGroupDataSet, byEmployeeTimelineOptions); + +const byLocationPanel = document.getElementById("byLocationPanel"); +const byLocationTimelineOptions = { + timeAxis: {scale: "hour", step: 6}, + orientation: {axis: "top"}, + xss: {disabled: true}, // Items are XSS safe through JQuery + zoomMin: zoomMin, + zoomMax: zoomMax, +}; +let byLocationGroupDataSet = new vis.DataSet(); +let byLocationItemDataSet = new vis.DataSet(); +let byLocationTimeline = new vis.Timeline(byLocationPanel, byLocationItemDataSet, byLocationGroupDataSet, byLocationTimelineOptions); + +let windowStart = JSJoda.LocalDate.now().toString(); +let windowEnd = JSJoda.LocalDate.parse(windowStart).plusDays(7).toString(); + +$(document).ready(function () { + let initialized = false; + + function safeInitialize() { + if (!initialized) { + initialized = true; + initializeApp(); + } + } + + // Ensure all resources are loaded before initializing + $(window).on('load', 
safeInitialize); + + // Fallback if window load event doesn't fire + setTimeout(safeInitialize, 100); +}); + +function initializeApp() { + replaceQuickstartSolverForgeAutoHeaderFooter(); + + $("#solveButton").click(function () { + solve(); + }); + $("#stopSolvingButton").click(function () { + stopSolving(); + }); + $("#analyzeButton").click(function () { + analyze(); + }); + // HACK to allow vis-timeline to work within Bootstrap tabs + $("#byEmployeeTab").on('shown.bs.tab', function (event) { + byEmployeeTimeline.redraw(); + }) + $("#byLocationTab").on('shown.bs.tab', function (event) { + byLocationTimeline.redraw(); + }) + + setupAjax(); + fetchDemoData(); +} + +function setupAjax() { + $.ajaxSetup({ + headers: { + 'Content-Type': 'application/json', + 'Accept': 'application/json,text/plain', // plain text is required by solve() returning UUID of the solver job + } + }); + // Extend jQuery to support $.put() and $.delete() + jQuery.each(["put", "delete"], function (i, method) { + jQuery[method] = function (url, data, callback, type) { + if (jQuery.isFunction(data)) { + type = type || callback; + callback = data; + data = undefined; + } + return jQuery.ajax({ + url: url, + type: method, + dataType: type, + data: data, + success: callback + }); + }; + }); +} + +function fetchDemoData() { + $.get("/demo-data", function (data) { + data.forEach(item => { + $("#testDataButton").append($('' + item + '')); + $("#" + item + "TestData").click(function () { + switchDataDropDownItemActive(item); + scheduleId = null; + demoDataId = item; + + refreshSchedule(); + }); + }); + demoDataId = data[0]; + switchDataDropDownItemActive(demoDataId); + refreshSchedule(); + }).fail(function (xhr, ajaxOptions, thrownError) { + // disable this page as there is no data + let $demo = $("#demo"); + $demo.empty(); + $demo.html("
No test data available
") + }); +} + +function switchDataDropDownItemActive(newItem) { + activeCssClass = "active"; + $("#testDataButton > a." + activeCssClass).removeClass(activeCssClass); + $("#" + newItem + "TestData").addClass(activeCssClass); +} + +function getShiftColor(shift, employee) { + const shiftStart = JSJoda.LocalDateTime.parse(shift.start); + const shiftStartDateString = shiftStart.toLocalDate().toString(); + const shiftEnd = JSJoda.LocalDateTime.parse(shift.end); + const shiftEndDateString = shiftEnd.toLocalDate().toString(); + if (employee.unavailableDates.includes(shiftStartDateString) || + // The contains() check is ignored for a shift end at midnight (00:00:00). + (shiftEnd.isAfter(shiftStart.toLocalDate().plusDays(1).atStartOfDay()) && + employee.unavailableDates.includes(shiftEndDateString))) { + return UNAVAILABLE_COLOR + } else if (employee.undesiredDates.includes(shiftStartDateString) || + // The contains() check is ignored for a shift end at midnight (00:00:00). + (shiftEnd.isAfter(shiftStart.toLocalDate().plusDays(1).atStartOfDay()) && + employee.undesiredDates.includes(shiftEndDateString))) { + return UNDESIRED_COLOR + } else if (employee.desiredDates.includes(shiftStartDateString) || + // The contains() check is ignored for a shift end at midnight (00:00:00). 
+ (shiftEnd.isAfter(shiftStart.toLocalDate().plusDays(1).atStartOfDay()) && + employee.desiredDates.includes(shiftEndDateString))) { + return DESIRED_COLOR + } else { + return " #729fcf"; // Tango Sky Blue + } +} + +function refreshSchedule() { + let path = "/schedules/" + scheduleId; + if (scheduleId === null) { + if (demoDataId === null) { + alert("Please select a test data set."); + return; + } + + path = "/demo-data/" + demoDataId; + } + $.getJSON(path, function (schedule) { + loadedSchedule = schedule; + renderSchedule(schedule); + }) + .fail(function (xhr, ajaxOptions, thrownError) { + showError("Getting the schedule has failed.", xhr); + refreshSolvingButtons(false); + }); +} + +function renderSchedule(schedule) { + console.log('Rendering schedule:', schedule); + + if (!schedule) { + console.error('No schedule data provided to renderSchedule'); + return; + } + + refreshSolvingButtons(schedule.solverStatus != null && schedule.solverStatus !== "NOT_SOLVING"); + $("#score").text("Score: " + (schedule.score == null ? "?" 
: schedule.score)); + + const unassignedShifts = $("#unassignedShifts"); + const groups = []; + + // Check if schedule.shifts exists and is an array + if (!schedule.shifts || !Array.isArray(schedule.shifts) || schedule.shifts.length === 0) { + console.warn('No shifts data available in schedule'); + return; + } + + // Show only first 7 days of draft + const scheduleStart = schedule.shifts.map(shift => JSJoda.LocalDateTime.parse(shift.start).toLocalDate()).sort()[0].toString(); + const scheduleEnd = JSJoda.LocalDate.parse(scheduleStart).plusDays(7).toString(); + + windowStart = scheduleStart; + windowEnd = scheduleEnd; + + unassignedShifts.children().remove(); + let unassignedShiftsCount = 0; + byEmployeeGroupDataSet.clear(); + byLocationGroupDataSet.clear(); + + byEmployeeItemDataSet.clear(); + byLocationItemDataSet.clear(); + + // Check if schedule.employees exists and is an array + if (!schedule.employees || !Array.isArray(schedule.employees)) { + console.warn('No employees data available in schedule'); + return; + } + + schedule.employees.forEach((employee, index) => { + const employeeGroupElement = $('') + .append($(`)`) + .append(employee.name)) + .append($('') + .append($(employee.skills.map(skill => `${skill}`).join('')))); + byEmployeeGroupDataSet.add({id: employee.name, content: employeeGroupElement.html()}); + + employee.unavailableDates.forEach((rawDate, dateIndex) => { + const date = JSJoda.LocalDate.parse(rawDate) + const start = date.atStartOfDay().toString(); + const end = date.plusDays(1).atStartOfDay().toString(); + const byEmployeeShiftElement = $(``) + .append($(``).text("Unavailable")); + byEmployeeItemDataSet.add({ + id: "employee-" + index + "-unavailability-" + dateIndex, group: employee.name, + content: byEmployeeShiftElement.html(), + start: start, end: end, + type: "background", + style: "opacity: 0.5; background-color: " + UNAVAILABLE_COLOR, + }); + }); + employee.undesiredDates.forEach((rawDate, dateIndex) => { + const date = 
JSJoda.LocalDate.parse(rawDate) + const start = date.atStartOfDay().toString(); + const end = date.plusDays(1).atStartOfDay().toString(); + const byEmployeeShiftElement = $(``) + .append($(``).text("Undesired")); + byEmployeeItemDataSet.add({ + id: "employee-" + index + "-undesired-" + dateIndex, group: employee.name, + content: byEmployeeShiftElement.html(), + start: start, end: end, + type: "background", + style: "opacity: 0.5; background-color: " + UNDESIRED_COLOR, + }); + }); + employee.desiredDates.forEach((rawDate, dateIndex) => { + const date = JSJoda.LocalDate.parse(rawDate) + const start = date.atStartOfDay().toString(); + const end = date.plusDays(1).atStartOfDay().toString(); + const byEmployeeShiftElement = $(``) + .append($(``).text("Desired")); + byEmployeeItemDataSet.add({ + id: "employee-" + index + "-desired-" + dateIndex, group: employee.name, + content: byEmployeeShiftElement.html(), + start: start, end: end, + type: "background", + style: "opacity: 0.5; background-color: " + DESIRED_COLOR, + }); + }); + }); + + schedule.shifts.forEach((shift, index) => { + if (groups.indexOf(shift.location) === -1) { + groups.push(shift.location); + byLocationGroupDataSet.add({ + id: shift.location, + content: shift.location, + }); + } + + if (shift.employee == null) { + unassignedShiftsCount++; + + const byLocationShiftElement = $('') + .append($(`)`) + .append("Unassigned")) + .append($('') + .append($(`${shift.requiredSkill}`))); + + byLocationItemDataSet.add({ + id: 'shift-' + index, group: shift.location, + content: byLocationShiftElement.html(), + start: shift.start, end: shift.end, + style: "background-color: #EF292999" + }); + } else { + const skillColor = (shift.employee.skills.indexOf(shift.requiredSkill) === -1 ? 
'#ef2929' : '#8ae234'); + const byEmployeeShiftElement = $('') + .append($(`)`) + .append(shift.location)) + .append($('') + .append($(`${shift.requiredSkill}`))); + const byLocationShiftElement = $('') + .append($(`)`) + .append(shift.employee.name)) + .append($('') + .append($(`${shift.requiredSkill}`))); + + const shiftColor = getShiftColor(shift, shift.employee); + byEmployeeItemDataSet.add({ + id: 'shift-' + index, group: shift.employee.name, + content: byEmployeeShiftElement.html(), + start: shift.start, end: shift.end, + style: "background-color: " + shiftColor + }); + byLocationItemDataSet.add({ + id: 'shift-' + index, group: shift.location, + content: byLocationShiftElement.html(), + start: shift.start, end: shift.end, + style: "background-color: " + shiftColor + }); + } + }); + + + if (unassignedShiftsCount === 0) { + unassignedShifts.append($(``).text(`There are no unassigned shifts.`)); + } else { + unassignedShifts.append($(``).text(`There are ${unassignedShiftsCount} unassigned shifts.`)); + } + byEmployeeTimeline.setWindow(scheduleStart, scheduleEnd); + byLocationTimeline.setWindow(scheduleStart, scheduleEnd); +} + +function solve() { + if (!loadedSchedule) { + showError("No schedule data loaded. 
Please wait for the data to load or refresh the page."); + return; + } + + console.log('Sending schedule data for solving:', loadedSchedule); + $.post("/schedules", JSON.stringify(loadedSchedule), function (data) { + scheduleId = data; + refreshSolvingButtons(true); + }).fail(function (xhr, ajaxOptions, thrownError) { + showError("Start solving failed.", xhr); + refreshSolvingButtons(false); + }, + "text"); +} + +function analyze() { + new bootstrap.Modal("#scoreAnalysisModal").show() + const scoreAnalysisModalContent = $("#scoreAnalysisModalContent"); + scoreAnalysisModalContent.children().remove(); + if (loadedSchedule.score == null) { + scoreAnalysisModalContent.text("No score to analyze yet, please first press the 'solve' button."); + } else { + $('#scoreAnalysisScoreLabel').text(`(${loadedSchedule.score})`); + $.put("/schedules/analyze", JSON.stringify(loadedSchedule), function (scoreAnalysis) { + let constraints = scoreAnalysis.constraints; + constraints.sort((a, b) => { + let aComponents = getScoreComponents(a.score), bComponents = getScoreComponents(b.score); + if (aComponents.hard < 0 && bComponents.hard > 0) return -1; + if (aComponents.hard > 0 && bComponents.soft < 0) return 1; + if (Math.abs(aComponents.hard) > Math.abs(bComponents.hard)) { + return -1; + } else { + if (aComponents.medium < 0 && bComponents.medium > 0) return -1; + if (aComponents.medium > 0 && bComponents.medium < 0) return 1; + if (Math.abs(aComponents.medium) > Math.abs(bComponents.medium)) { + return -1; + } else { + if (aComponents.soft < 0 && bComponents.soft > 0) return -1; + if (aComponents.soft > 0 && bComponents.soft < 0) return 1; + + return Math.abs(bComponents.soft) - Math.abs(aComponents.soft); + } + } + }); + constraints.map((e) => { + let components = getScoreComponents(e.weight); + e.type = components.hard != 0 ? 'hard' : (components.medium != 0 ? 
'medium' : 'soft'); + e.weight = components[e.type]; + let scores = getScoreComponents(e.score); + e.implicitScore = scores.hard != 0 ? scores.hard : (scores.medium != 0 ? scores.medium : scores.soft); + }); + scoreAnalysis.constraints = constraints; + + scoreAnalysisModalContent.children().remove(); + scoreAnalysisModalContent.text(""); + + const analysisTable = $(`Generate the optimal schedule for your employees.
+ +
+
+ curl -X GET -H 'Accept:application/json' http://localhost:8080/demo-data/SMALL -o sample.json
+
+
+ The POST operation returns a jobId that should be used in subsequent commands.
+
+ curl -X POST -H 'Content-Type:application/json' http://localhost:8080/schedules -d@sample.json
+
+
+
+
+ curl -X GET -H 'Accept:application/json' http://localhost:8080/schedules/{jobId}/status
+
+
+
+
+ curl -X GET -H 'Accept:application/json' http://localhost:8080/schedules/{jobId}
+
+
+
+
+ curl -X DELETE -H 'Accept:application/json' http://localhost:8080/schedules/{jobId}
+
+ `).text(serverErrorMessage + "\n\nCode: " + serverErrorCode + "\nError id: " + serverErrorId))
+ )
+ );
+ $("#notificationPanel").append(notification);
+ notification.toast({delay: 30000});
+ notification.toast('show');
+}
+
+// ****************************************************************************
+// Application info
+// ****************************************************************************
+
+function applicationInfo() {
+ $.getJSON("info", function (info) {
+ $("#applicationInfo").append("" + info.application + " (version: " + info.version + ", built at: " + info.built + ")");
+ }).fail(function (xhr, ajaxOptions, thrownError) {
+ console.warn("Unable to collect application information");
+ });
+}
+
+// ****************************************************************************
+// TangoColorFactory
+// ****************************************************************************
+
+// Tango-palette base colors; SEQUENCE_2 is the second palette used once the
+// first is exhausted, and pairs of (SEQUENCE_2, SEQUENCE_1) entries are
+// blended for further shades (see nextColor()).
+const SEQUENCE_1 = [0x8AE234, 0xFCE94F, 0x729FCF, 0xE9B96E, 0xAD7FA8];
+const SEQUENCE_2 = [0x73D216, 0xEDD400, 0x3465A4, 0xC17D11, 0x75507B];
+
+// Cache of key -> assigned color, so the same key always gets the same color.
+var colorMap = new Map;
+// Number of colors handed out so far; drives the next pick in nextColor().
+var nextColorCount = 0;
+
+function pickColor(object) {
+ let color = colorMap[object];
+ if (color !== undefined) {
+ return color;
+ }
+ color = nextColor();
+ colorMap[object] = color;
+ return color;
+}
+
+function nextColor() {
+ let color;
+ let colorIndex = nextColorCount % SEQUENCE_1.length;
+ let shadeIndex = Math.floor(nextColorCount / SEQUENCE_1.length);
+ if (shadeIndex === 0) {
+ color = SEQUENCE_1[colorIndex];
+ } else if (shadeIndex === 1) {
+ color = SEQUENCE_2[colorIndex];
+ } else {
+ shadeIndex -= 3;
+ let floorColor = SEQUENCE_2[colorIndex];
+ let ceilColor = SEQUENCE_1[colorIndex];
+ let base = Math.floor((shadeIndex / 2) + 1);
+ let divisor = 2;
+ while (base >= divisor) {
+ divisor *= 2;
+ }
+ base = (base * 2) - divisor + 1;
+ let shadePercentage = base / divisor;
+ color = buildPercentageColor(floorColor, ceilColor, shadePercentage);
+ }
+ nextColorCount++;
+ return "#" + color.toString(16);
+}
+
+function buildPercentageColor(floorColor, ceilColor, shadePercentage) {
+ let red = (floorColor & 0xFF0000) + Math.floor(shadePercentage * ((ceilColor & 0xFF0000) - (floorColor & 0xFF0000))) & 0xFF0000;
+ let green = (floorColor & 0x00FF00) + Math.floor(shadePercentage * ((ceilColor & 0x00FF00) - (floorColor & 0x00FF00))) & 0x00FF00;
+ let blue = (floorColor & 0x0000FF) + Math.floor(shadePercentage * ((ceilColor & 0x0000FF) - (floorColor & 0x0000FF))) & 0x0000FF;
+ return red | green | blue;
+}
diff --git a/tests/__pycache__/test_constraints.cpython-310-pytest-8.2.2.pyc b/tests/__pycache__/test_constraints.cpython-310-pytest-8.2.2.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..11589cbf3d88ea197cf36e8fefeb71cc31582595
Binary files /dev/null and b/tests/__pycache__/test_constraints.cpython-310-pytest-8.2.2.pyc differ
diff --git a/tests/__pycache__/test_constraints.cpython-311-pytest-8.2.2.pyc b/tests/__pycache__/test_constraints.cpython-311-pytest-8.2.2.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..22f437d354cfbbeb1aadcf03595d2cf031c3648b
Binary files /dev/null and b/tests/__pycache__/test_constraints.cpython-311-pytest-8.2.2.pyc differ
diff --git a/tests/__pycache__/test_constraints.cpython-312-pytest-8.2.2.pyc b/tests/__pycache__/test_constraints.cpython-312-pytest-8.2.2.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..86cee6667908bdb3a0a835be8472bc4e8dae05ad
Binary files /dev/null and b/tests/__pycache__/test_constraints.cpython-312-pytest-8.2.2.pyc differ
diff --git a/tests/__pycache__/test_feasible.cpython-310-pytest-8.2.2.pyc b/tests/__pycache__/test_feasible.cpython-310-pytest-8.2.2.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3bc94abe7fc7d23045e62f5f6e7c82dc6a3552c1
Binary files /dev/null and b/tests/__pycache__/test_feasible.cpython-310-pytest-8.2.2.pyc differ
diff --git a/tests/__pycache__/test_feasible.cpython-311-pytest-8.2.2.pyc b/tests/__pycache__/test_feasible.cpython-311-pytest-8.2.2.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1a519df64aefdab4a6c85cfd285bf7e0b1be2337
Binary files /dev/null and b/tests/__pycache__/test_feasible.cpython-311-pytest-8.2.2.pyc differ
diff --git a/tests/__pycache__/test_feasible.cpython-312-pytest-8.2.2.pyc b/tests/__pycache__/test_feasible.cpython-312-pytest-8.2.2.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6d6bb9e19014ccf7271675e5b362bd06e2b27173
Binary files /dev/null and b/tests/__pycache__/test_feasible.cpython-312-pytest-8.2.2.pyc differ
diff --git a/tests/test_constraints.py b/tests/test_constraints.py
new file mode 100644
index 0000000000000000000000000000000000000000..729aa84f6e5e3e07c5e4046e555b536bc7b1a75e
--- /dev/null
+++ b/tests/test_constraints.py
@@ -0,0 +1,724 @@
+from solverforge_legacy.solver.test import ConstraintVerifier
+
+from employee_scheduling.domain import *
+from employee_scheduling.constraints import *
+
+from datetime import date, datetime, time, timedelta
+
+# Fixed dates/times used to build deterministic shifts in the tests below.
+# DAY shift is 09:00-17:00; AFTERNOON shift is 13:00-21:00, overlapping the
+# day shift by 4 hours.
+DAY_1 = date(2021, 2, 1)
+DAY_3 = date(2021, 2, 3)
+DAY_START_TIME = datetime.combine(DAY_1, time(9, 0))
+DAY_END_TIME = datetime.combine(DAY_1, time(17, 0))
+AFTERNOON_START_TIME = datetime.combine(DAY_1, time(13, 0))
+AFTERNOON_END_TIME = datetime.combine(DAY_1, time(21, 0))
+
+# Shared verifier for all constraint unit tests; binds the constraint provider
+# to the planning solution and planning entity classes.
+constraint_verifier = ConstraintVerifier.build(
+    define_constraints, EmployeeSchedule, Shift
+)
+
+
+def test_required_skill():
+ employee = Employee(name="Amy")
+ (
+ constraint_verifier.verify_that(required_skill)
+ .given(
+ employee,
+ Shift(
+ id="1",
+ start=DAY_START_TIME,
+ end=DAY_END_TIME,
+ location="Location",
+ required_skill="Skill",
+ employee=employee,
+ ),
+ )
+ .penalizes(1)
+ )
+
+ employee = Employee(name="Beth", skills={"Skill"})
+ (
+ constraint_verifier.verify_that(required_skill)
+ .given(
+ employee,
+ Shift(
+ id="2",
+ start=DAY_START_TIME,
+ end=DAY_END_TIME,
+ location="Location",
+ required_skill="Skill",
+ employee=employee,
+ ),
+ )
+ .penalizes(0)
+ )
+
+
+def test_overlapping_shifts():
+ employee1 = Employee(name="Amy")
+ employee2 = Employee(name="Beth")
+ (
+ constraint_verifier.verify_that(no_overlapping_shifts)
+ .given(
+ employee1,
+ employee2,
+ Shift(
+ id="1",
+ start=DAY_START_TIME,
+ end=DAY_END_TIME,
+ location="Location",
+ required_skill="Skill",
+ employee=employee1,
+ ),
+ Shift(
+ id="2",
+ start=DAY_START_TIME,
+ end=DAY_END_TIME,
+ location="Location 2",
+ required_skill="Skill",
+ employee=employee1,
+ ),
+ )
+ .penalizes_by(timedelta(hours=8) // timedelta(minutes=1))
+ )
+
+ (
+ constraint_verifier.verify_that(no_overlapping_shifts)
+ .given(
+ employee1,
+ employee2,
+ Shift(
+ id="1",
+ start=DAY_START_TIME,
+ end=DAY_END_TIME,
+ location="Location",
+ required_skill="Skill",
+ employee=employee1,
+ ),
+ Shift(
+ id="2",
+ start=DAY_START_TIME,
+ end=DAY_END_TIME,
+ location="Location 2",
+ required_skill="Skill",
+ employee=employee2,
+ ),
+ )
+ .penalizes(0)
+ )
+
+ (
+ constraint_verifier.verify_that(no_overlapping_shifts)
+ .given(
+ employee1,
+ employee2,
+ Shift(
+ id="1",
+ start=DAY_START_TIME,
+ end=DAY_END_TIME,
+ location="Location",
+ required_skill="Skill",
+ employee=employee1,
+ ),
+ Shift(
+ id="2",
+ start=AFTERNOON_START_TIME,
+ end=AFTERNOON_END_TIME,
+ location="Location 2",
+ required_skill="Skill",
+ employee=employee1,
+ ),
+ )
+ .penalizes_by(timedelta(hours=4) // timedelta(minutes=1))
+ )
+
+
def test_one_shift_per_day():
    # NOTE(review): despite the test's name, every case below verifies
    # no_overlapping_shifts rather than a one-shift-per-day constraint.
    # If the constraint module defines a separate one_shift_per_day
    # constraint, these cases should presumably verify that instead —
    # confirm against employee_scheduling's constraint definitions.
    employee1 = Employee(name="Amy")
    employee2 = Employee(name="Beth")
    # Two identical shifts on the same day for the same employee:
    # exactly one constraint match.
    (
        constraint_verifier.verify_that(no_overlapping_shifts)
        .given(
            employee1,
            employee2,
            Shift(
                id="1",
                start=DAY_START_TIME,
                end=DAY_END_TIME,
                location="Location",
                required_skill="Skill",
                employee=employee1,
            ),
            Shift(
                id="2",
                start=DAY_START_TIME,
                end=DAY_END_TIME,
                location="Location 2",
                required_skill="Skill",
                employee=employee1,
            ),
        )
        .penalizes(1)
    )

    # The same pair of shifts split across two employees: no match.
    (
        constraint_verifier.verify_that(no_overlapping_shifts)
        .given(
            employee1,
            employee2,
            Shift(
                id="1",
                start=DAY_START_TIME,
                end=DAY_END_TIME,
                location="Location",
                required_skill="Skill",
                employee=employee1,
            ),
            Shift(
                id="2",
                start=DAY_START_TIME,
                end=DAY_END_TIME,
                location="Location 2",
                required_skill="Skill",
                employee=employee2,
            ),
        )
        .penalizes(0)
    )

    # A day shift plus an afternoon shift on the same day for the same
    # employee: one match.
    (
        constraint_verifier.verify_that(no_overlapping_shifts)
        .given(
            employee1,
            employee2,
            Shift(
                id="1",
                start=DAY_START_TIME,
                end=DAY_END_TIME,
                location="Location",
                required_skill="Skill",
                employee=employee1,
            ),
            Shift(
                id="2",
                start=AFTERNOON_START_TIME,
                end=AFTERNOON_END_TIME,
                location="Location 2",
                required_skill="Skill",
                employee=employee1,
            ),
        )
        .penalizes(1)
    )

    # Shifts on two different days: no match.
    (
        constraint_verifier.verify_that(no_overlapping_shifts)
        .given(
            employee1,
            employee2,
            Shift(
                id="1",
                start=DAY_START_TIME,
                end=DAY_END_TIME,
                location="Location",
                required_skill="Skill",
                employee=employee1,
            ),
            Shift(
                id="2",
                start=DAY_START_TIME + timedelta(days=1),
                end=DAY_END_TIME + timedelta(days=1),
                location="Location 2",
                required_skill="Skill",
                employee=employee1,
            ),
        )
        .penalizes(0)
    )
+
+
def test_at_least_10_hours_between_shifts():
    """at_least_10_hours_between_two_shifts: penalize consecutive shifts of the
    same employee whose break is shorter than 10 hours, by the missing minutes.

    Fix: the final verification below previously verified no_overlapping_shifts
    (a copy-paste slip from test_overlapping_shifts); it now verifies the
    constraint this test is about. Its assertion is unchanged — shifts on
    consecutive days leave a break of at least 10 hours, so no penalty.
    """
    employee1 = Employee(name="Amy")
    employee2 = Employee(name="Beth")

    # Break shorter than 10 hours: penalized by the 360 missing minutes.
    (
        constraint_verifier.verify_that(at_least_10_hours_between_two_shifts)
        .given(
            employee1,
            employee2,
            Shift(
                id="1",
                start=DAY_START_TIME,
                end=DAY_END_TIME,
                location="Location",
                required_skill="Skill",
                employee=employee1,
            ),
            Shift(
                id="2",
                start=AFTERNOON_END_TIME,
                end=DAY_START_TIME + timedelta(days=1),
                location="Location 2",
                required_skill="Skill",
                employee=employee1,
            ),
        )
        .penalizes_by(360)
    )

    # Back-to-back shifts (zero break): the full 600 minutes are missing.
    (
        constraint_verifier.verify_that(at_least_10_hours_between_two_shifts)
        .given(
            employee1,
            employee2,
            Shift(
                id="1",
                start=DAY_START_TIME,
                end=DAY_END_TIME,
                location="Location",
                required_skill="Skill",
                employee=employee1,
            ),
            Shift(
                id="2",
                start=DAY_END_TIME,
                end=DAY_START_TIME + timedelta(days=1),
                location="Location 2",
                required_skill="Skill",
                employee=employee1,
            ),
        )
        .penalizes_by(600)
    )

    # Same back-to-back pair given in the opposite order: same penalty.
    (
        constraint_verifier.verify_that(at_least_10_hours_between_two_shifts)
        .given(
            employee1,
            employee2,
            Shift(
                id="1",
                start=DAY_END_TIME,
                end=DAY_START_TIME + timedelta(days=1),
                location="Location",
                required_skill="Skill",
                employee=employee1,
            ),
            Shift(
                id="2",
                start=DAY_START_TIME,
                end=DAY_END_TIME,
                location="Location 2",
                required_skill="Skill",
                employee=employee1,
            ),
        )
        .penalizes_by(600)
    )

    # Exactly a 10-hour break: no penalty (boundary case).
    (
        constraint_verifier.verify_that(at_least_10_hours_between_two_shifts)
        .given(
            employee1,
            employee2,
            Shift(
                id="1",
                start=DAY_START_TIME,
                end=DAY_END_TIME,
                location="Location",
                required_skill="Skill",
                employee=employee1,
            ),
            Shift(
                id="2",
                start=DAY_END_TIME + timedelta(hours=10),
                end=DAY_START_TIME + timedelta(days=1),
                location="Location 2",
                required_skill="Skill",
                employee=employee1,
            ),
        )
        .penalizes(0)
    )

    # Short break, but between shifts of different employees: no penalty.
    (
        constraint_verifier.verify_that(at_least_10_hours_between_two_shifts)
        .given(
            employee1,
            employee2,
            Shift(
                id="1",
                start=DAY_START_TIME,
                end=DAY_END_TIME,
                location="Location",
                required_skill="Skill",
                employee=employee1,
            ),
            Shift(
                id="2",
                start=AFTERNOON_END_TIME,
                end=DAY_START_TIME + timedelta(days=1),
                location="Location 2",
                required_skill="Skill",
                employee=employee2,
            ),
        )
        .penalizes(0)
    )

    # Shifts on consecutive days (break well over 10 hours): no penalty.
    (
        constraint_verifier.verify_that(at_least_10_hours_between_two_shifts)
        .given(
            employee1,
            employee2,
            Shift(
                id="1",
                start=DAY_START_TIME,
                end=DAY_END_TIME,
                location="Location",
                required_skill="Skill",
                employee=employee1,
            ),
            Shift(
                id="2",
                start=DAY_START_TIME + timedelta(days=1),
                end=DAY_END_TIME + timedelta(days=1),
                location="Location 2",
                required_skill="Skill",
                employee=employee1,
            ),
        )
        .penalizes(0)
    )
+
+
def test_unavailable_employee():
    """unavailable_employee: penalize a shift by the minutes it overlaps one of
    the assigned employee's unavailable dates."""
    amy = Employee(name="Amy", unavailable_dates={DAY_1, DAY_3})
    beth = Employee(name="Beth")

    def verify(start, end, assignee):
        # Build one shift and feed it to the verifier; callers chain the
        # expected penalty onto the returned verification.
        return constraint_verifier.verify_that(unavailable_employee).given(
            amy,
            beth,
            Shift(
                id="1",
                start=start,
                end=end,
                location="Location",
                required_skill="Skill",
                employee=assignee,
            ),
        )

    # Full 8-hour shift on an unavailable day.
    verify(DAY_START_TIME, DAY_END_TIME, amy).penalizes_by(
        timedelta(hours=8) // timedelta(minutes=1)
    )

    # Shift starting the previous day and overlapping the unavailable day
    # for 17 hours.
    verify(DAY_START_TIME - timedelta(days=1), DAY_END_TIME, amy).penalizes_by(
        timedelta(hours=17) // timedelta(minutes=1)
    )

    # Shift entirely outside the unavailable dates: no penalty.
    verify(
        DAY_START_TIME + timedelta(days=1),
        DAY_END_TIME + timedelta(days=1),
        amy,
    ).penalizes(0)

    # An employee with no unavailable dates is never penalized.
    verify(DAY_START_TIME, DAY_END_TIME, beth).penalizes(0)
+
+
def test_undesired_day_for_employee():
    """undesired_day_for_employee: penalize a shift by the minutes it overlaps
    one of the assigned employee's undesired dates."""
    amy = Employee(name="Amy", undesired_dates={DAY_1, DAY_3})
    beth = Employee(name="Beth")

    def verify(start, end, assignee):
        # Build one shift and feed it to the verifier; callers chain the
        # expected penalty onto the returned verification.
        return constraint_verifier.verify_that(undesired_day_for_employee).given(
            amy,
            beth,
            Shift(
                id="1",
                start=start,
                end=end,
                location="Location",
                required_skill="Skill",
                employee=assignee,
            ),
        )

    # Full 8-hour shift on an undesired day.
    verify(DAY_START_TIME, DAY_END_TIME, amy).penalizes_by(
        timedelta(hours=8) // timedelta(minutes=1)
    )

    # Shift starting the previous day and overlapping the undesired day
    # for 17 hours.
    verify(DAY_START_TIME - timedelta(days=1), DAY_END_TIME, amy).penalizes_by(
        timedelta(hours=17) // timedelta(minutes=1)
    )

    # Shift entirely outside the undesired dates: no penalty.
    verify(
        DAY_START_TIME + timedelta(days=1),
        DAY_END_TIME + timedelta(days=1),
        amy,
    ).penalizes(0)

    # An employee with no undesired dates is never penalized.
    verify(DAY_START_TIME, DAY_END_TIME, beth).penalizes(0)
+
+
def test_desired_day_for_employee():
    """desired_day_for_employee: reward a shift by the minutes it overlaps one
    of the assigned employee's desired dates."""
    amy = Employee(name="Amy", desired_dates={DAY_1, DAY_3})
    beth = Employee(name="Beth")

    def verify(start, end, assignee):
        # Build one shift and feed it to the verifier; callers chain the
        # expected reward onto the returned verification.
        return constraint_verifier.verify_that(desired_day_for_employee).given(
            amy,
            beth,
            Shift(
                id="1",
                start=start,
                end=end,
                location="Location",
                required_skill="Skill",
                employee=assignee,
            ),
        )

    # Full 8-hour shift on a desired day.
    verify(DAY_START_TIME, DAY_END_TIME, amy).rewards_with(
        timedelta(hours=8) // timedelta(minutes=1)
    )

    # Shift starting the previous day and overlapping the desired day
    # for 17 hours.
    verify(DAY_START_TIME - timedelta(days=1), DAY_END_TIME, amy).rewards_with(
        timedelta(hours=17) // timedelta(minutes=1)
    )

    # Shift entirely outside the desired dates: no reward.
    verify(
        DAY_START_TIME + timedelta(days=1),
        DAY_END_TIME + timedelta(days=1),
        amy,
    ).rewards(0)

    # An employee with no desired dates is never rewarded.
    verify(DAY_START_TIME, DAY_END_TIME, beth).rewards(0)
+
+
def test_max_shifts_per_employee():
    """max_shifts_per_employee: penalize one point per shift beyond 12 for a
    single employee."""
    amy = Employee(name="Amy")

    def consecutive_shifts(count):
        # One shift per day on `count` consecutive days, all assigned to Amy.
        return [
            Shift(
                id=str(day),
                start=DAY_START_TIME + timedelta(days=day),
                end=DAY_END_TIME + timedelta(days=day),
                location="Location",
                required_skill="Skill",
                employee=amy,
            )
            for day in range(count)
        ]

    # Exactly at the limit of 12 shifts: no violation.
    (
        constraint_verifier.verify_that(max_shifts_per_employee)
        .given(amy, *consecutive_shifts(12))
        .penalizes(0)
    )

    # One shift over the limit: penalty of 13 - 12 = 1.
    (
        constraint_verifier.verify_that(max_shifts_per_employee)
        .given(amy, *consecutive_shifts(13))
        .penalizes_by(1)
    )

    # Three shifts over the limit: penalty of 15 - 12 = 3.
    (
        constraint_verifier.verify_that(max_shifts_per_employee)
        .given(amy, *consecutive_shifts(15))
        .penalizes_by(3)
    )
+
+
def test_balance_employee_shift_assignments():
    """balance_employee_shift_assignments: penalize schedules whose shift
    counts are spread unevenly across employees."""
    amy = Employee(name="Amy", desired_dates={DAY_1, DAY_3})
    beth = Employee(name="Beth")

    def day_shift(shift_id, assignee):
        # A standard day shift; only the id and assignee vary in this test.
        return Shift(
            id=shift_id,
            start=DAY_START_TIME,
            end=DAY_END_TIME,
            location="Location",
            required_skill="Skill",
            employee=assignee,
        )

    # No employee has any shift: the schedule is perfectly balanced.
    (
        constraint_verifier.verify_that(balance_employee_shift_assignments)
        .given(amy, beth)
        .penalizes_by(0)
    )

    # Only one employee works: unbalanced, so some positive penalty applies.
    (
        constraint_verifier.verify_that(balance_employee_shift_assignments)
        .given(amy, beth, day_shift("1", amy))
        .penalizes_by_more_than(0)
    )

    # One shift each: perfectly balanced again.
    (
        constraint_verifier.verify_that(balance_employee_shift_assignments)
        .given(amy, beth, day_shift("1", amy), day_shift("2", beth))
        .penalizes_by(0)
    )
diff --git a/tests/test_feasible.py b/tests/test_feasible.py
new file mode 100644
index 0000000000000000000000000000000000000000..94859e8be6f101de06dbc1c8a813a3c10e8c0d15
--- /dev/null
+++ b/tests/test_feasible.py
@@ -0,0 +1,42 @@
+from solverforge_legacy.solver import SolverFactory
+from solverforge_legacy.solver.config import (
+ SolverConfig,
+ ScoreDirectorFactoryConfig,
+ TerminationConfig,
+ Duration,
+ TerminationCompositionStyle,
+)
+
+from employee_scheduling.rest_api import app
+from employee_scheduling.domain import EmployeeScheduleModel
+from employee_scheduling.converters import model_to_schedule
+
+from fastapi.testclient import TestClient
+from time import sleep
+from pytest import fail
+
# Shared in-process test client for the FastAPI app, reused by all tests
# in this module.
client = TestClient(app)
+
+
def test_feasible():
    """End-to-end smoke test: solving the SMALL demo dataset reaches a
    feasible score.

    Posts the demo schedule to the solver, polls the job until the returned
    score is feasible (or the attempt budget runs out), and always stops the
    solver job before returning — even when polling raises — so a failing
    run does not leave a solver thread behind.
    """
    demo_data_response = client.get("/demo-data/SMALL")
    assert demo_data_response.status_code == 200

    job_id_response = client.post("/schedules", json=demo_data_response.json())
    assert job_id_response.status_code == 200
    # The endpoint returns a JSON-encoded string; parse it instead of
    # hand-stripping the surrounding quotes with text[1:-1].
    job_id = job_id_response.json()

    ATTEMPTS = 1_000  # 1000 polls x 0.1 s = at most ~100 s of waiting
    try:
        for _ in range(ATTEMPTS):
            sleep(0.1)
            schedule_response = client.get(f"/schedules/{job_id}")
            schedule_json = schedule_response.json()
            schedule_model = EmployeeScheduleModel.model_validate(schedule_json)
            schedule = model_to_schedule(schedule_model)
            if schedule.score is not None and schedule.score.is_feasible:
                return
        fail("solution is not feasible")
    finally:
        # Stop the solver job on every exit path (success, timeout, or an
        # unexpected exception while polling).
        stop_solving_response = client.delete(f"/schedules/{job_id}")
        assert stop_solving_response.status_code == 200