diff --git a/LICENSE.txt b/LICENSE.txt
new file mode 100644
index 0000000000000000000000000000000000000000..9cf106272ac3b56b0c4c80218e8fc10a664ca5f4
--- /dev/null
+++ b/LICENSE.txt
@@ -0,0 +1,19 @@
+MIT License
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git "a/cycle_einzeln_constraint\303\274berpr\303\274fung.log" "b/cycle_einzeln_constraint\303\274berpr\303\274fung.log"
deleted file mode 100644
index 0601dea0fd1d69199f4dde5c933afc05af08aaa6..0000000000000000000000000000000000000000
--- "a/cycle_einzeln_constraint\303\274berpr\303\274fung.log"
+++ /dev/null
@@ -1,3 +0,0 @@
-2024-06-02 01:44:43,960 - INFO - 
- Prozess für githubtests/medium_test_set/instances/10.gr gestartet
-2024-06-02 01:44:43,960 - INFO - Die Ausgabedatei wird mytests/solutions\10.sol sein
diff --git a/len_cycles.log b/len_cycles.log
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/optimiert_mit_besserer_zykel_betrachtung_und_cache.log b/optimiert_mit_besserer_zykel_betrachtung_und_cache.log
deleted file mode 100644
index 533120b56c5bce5e858fa9a1e7ecdd88f345083f..0000000000000000000000000000000000000000
--- a/optimiert_mit_besserer_zykel_betrachtung_und_cache.log
+++ /dev/null
@@ -1,4 +0,0 @@
-2024-06-02 14:58:47,921 - Prozess für githubtests/medium_test_set/instances/10.gr gestartet
-2024-06-02 14:58:47,921 - Die Ausgabedatei wird mytests/solutions\10.sol sein
-2024-06-03 14:43:43,229 - Prozess für githubtests/medium_test_set/instances/10.gr gestartet
-2024-06-03 14:43:43,229 - Die Ausgabedatei wird mytests/solutions\10.sol sein
diff --git a/src/len_cycles.log b/src/len_cycles.log
index c1fce25556c0cd7c38ff4590af3d88610d241829..981be427ca14fd310045feb0aa40f24f2961e998 100644
--- a/src/len_cycles.log
+++ b/src/len_cycles.log
@@ -601,3 +601,41 @@
 2024-06-06 11:57:44,643 - INFO - Prozess für mytests/instances/10.gr gestartet
 2024-06-06 11:57:44,653 - INFO - Größen der Partitionen: A=8358, B=8064
 2024-06-06 11:57:44,664 - INFO - 16489 Kanten geladen.
+2024-06-06 13:38:34,997 - INFO - Prozess für mytests/instances/0.gr gestartet
+2024-06-06 13:38:35,006 - INFO - Größen der Partitionen: A=3, B=3
+2024-06-06 13:38:35,006 - INFO - 3 Kanten geladen.
+2024-06-06 13:38:35,168 - INFO - Status der Lösung: Optimal
+2024-06-06 13:38:35,169 - INFO - Optimale Lösung gefunden. Ergebnisse werden gespeichert.
+2024-06-06 13:38:35,169 - INFO - Alle gefundenen Zyklen: 0 Stück.
+2024-06-06 13:38:35,169 - INFO - Verstrichene Zeit: 0:00:00.172 (h:m:s.ms) 
+
+2024-06-06 13:38:35,170 - INFO - Prozess für mytests/instances/1.gr gestartet
+2024-06-06 13:38:35,179 - INFO - Größen der Partitionen: A=780, B=743
+2024-06-06 13:38:35,180 - INFO - 1522 Kanten geladen.
+2024-06-06 14:26:18,212 - INFO - Status der Lösung: Optimal
+2024-06-06 14:26:18,214 - INFO - Optimale Lösung gefunden. Ergebnisse werden gespeichert.
+2024-06-06 14:26:18,483 - INFO - Alle gefundenen Zyklen: 0 Stück.
+2024-06-06 14:26:18,484 - INFO - Verstrichene Zeit: 0:47:43.313 (h:m:s.ms) 
+
+2024-06-06 14:26:19,182 - INFO - Prozess für mytests/instances/10.gr gestartet
+2024-06-06 14:26:19,195 - INFO - Größen der Partitionen: A=8358, B=8064
+2024-06-06 14:26:19,207 - INFO - 16489 Kanten geladen.
+2024-06-06 15:35:46,182 - INFO - Prozess für mytests/instances/0.gr gestartet
+2024-06-06 15:35:46,183 - INFO - Größen der Partitionen: A=3, B=3
+2024-06-06 15:35:46,183 - INFO - 3 Kanten geladen.
+2024-06-06 15:35:46,575 - INFO - Status der Lösung: Optimal
+2024-06-06 15:35:46,575 - INFO - Optimale Lösung gefunden. Ergebnisse werden gespeichert.
+2024-06-06 15:35:46,576 - INFO - Alle gefundenen Zyklen: 0 Stück.
+2024-06-06 15:35:46,576 - INFO - Verstrichene Zeit: 0:00:00.394 (h:m:s.ms) 
+
+2024-06-06 15:35:46,577 - INFO - Prozess für mytests/instances/1.gr gestartet
+2024-06-06 15:35:46,577 - INFO - Größen der Partitionen: A=780, B=743
+2024-06-06 15:35:46,578 - INFO - 1522 Kanten geladen.
+2024-06-06 16:22:11,744 - INFO - Status der Lösung: Optimal
+2024-06-06 16:22:11,745 - INFO - Optimale Lösung gefunden. Ergebnisse werden gespeichert.
+2024-06-06 16:22:11,844 - INFO - Alle gefundenen Zyklen: 0 Stück.
+2024-06-06 16:22:11,845 - INFO - Verstrichene Zeit: 0:46:25.267 (h:m:s.ms) 
+
+2024-06-06 16:22:12,148 - INFO - Prozess für mytests/instances/10.gr gestartet
+2024-06-06 16:22:12,158 - INFO - Größen der Partitionen: A=8358, B=8064
+2024-06-06 16:22:12,169 - INFO - 16489 Kanten geladen.
diff --git a/src/logfile.log b/src/logfile.log
index 9d85a2d880cdca711a26a834d85a25ea7ec4a349..f2f6adf0bdcf0c2bac01c07ff0851029f81df6d2 100644
Binary files a/src/logfile.log and b/src/logfile.log differ
diff --git a/src/main_iterative_opt.py b/src/main_iterative_opt.py
index 3b41a13522399a2e5b09eb912481ca88b188921c..064a338159ee04c533547c5c102d471ea4aafc5b 100644
--- a/src/main_iterative_opt.py
+++ b/src/main_iterative_opt.py
@@ -226,7 +226,7 @@ def process_directory(directory_path):
 # directory_path = 'mytests/instances/'
 # process_directory(directory_path)
 
-# test_file = 'githubtests/tiny_test_set/instances/complete_4_5.gr'
-test_file = 'githubtests/medium_test_set/instances/11.gr'
+test_file = 'githubtests/tiny_test_set/instances/complete_4_5.gr'
+# test_file = 'githubtests/medium_test_set/instances/11.gr'
 # test_file = 'mytests/instances/1.gr'
 solve_bipartite_minimization(test_file)
diff --git a/src/main_iterative_opt2 b/src/main_iterative_opt2.py
similarity index 100%
rename from src/main_iterative_opt2
rename to src/main_iterative_opt2.py
diff --git a/src/main_iterative_opt3.py b/src/main_iterative_opt3.py
new file mode 100644
index 0000000000000000000000000000000000000000..694d515960f62fe12878720652f12815623ba47b
--- /dev/null
+++ b/src/main_iterative_opt3.py
@@ -0,0 +1,241 @@
+import math
+import os
+import logging
+from datetime import timedelta
+from pulp import *
+import time  # imported after pulp's star import so that the 'time' name is not shadowed
+from collections import defaultdict, deque
+
+# Create a file handler and a stream handler
+# Considers all possible cycles, but works through them step by step instead of
+# all at once, checking whether each constraint already exists before adding it
+file_handler = logging.FileHandler('optimiert_mit_besserer_zykel_betrachtung_und_cache.log')
+console_handler = logging.StreamHandler()
+logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(message)s', handlers=[file_handler, console_handler])
+
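+# Sketch of the solve loop below: solve the MIP, read the implied order of
+# partition B off the y variables, collect directed cycles that show where this
+# "order" is still inconsistent, add ordering constraints for a prioritized
+# subset of them, and re-solve until no cycle remains.
+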
+def count_crossings_via_variables(c_vars):
+    crossings = 0
+    for c_var in c_vars.values():
+        if c_var.varValue == 1:
+            crossings += 1
+    return crossings
+
+def prioritize_cycles(cycles):
+    # Example: prioritize cycles by length, shortest first
+    cycles.sort(key=len, reverse=False)
+    return cycles
+
+def detect_cycle(graph, node, visited, rec_stack, path, cycles):
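+    # note: recursive DFS, one stack frame per node on the current path; very
+    # large instances may exceed CPython's default recursion limit, so an
+    # iterative DFS (or raising sys.setrecursionlimit) could be needed there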
+    visited[node] = True
+    rec_stack[node] = True
+    path.append(node)
+
+    for neighbor in graph[node]:
+        if not visited[neighbor]:
+            detect_cycle(graph, neighbor, visited, rec_stack, path, cycles)
+        elif rec_stack[neighbor]:
+            cycle_start = path.index(neighbor)
+            cycles.append(path[cycle_start:].copy())
+
+    rec_stack[node] = False
+    path.pop()
+
+def find_all_cycles(graph, nodes):
+    visited = {node: False for node in nodes}
+    rec_stack = {node: False for node in nodes}
+    path = []
+    cycles = []
+
+    for node in nodes:
+        if not visited[node]:
+            detect_cycle(graph, node, visited, rec_stack, path, cycles)
+
+    # Remove duplicate cycles
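+    # (cycles are compared as sorted node tuples, so rotations and reversals
+    # of the same cycle are treated as duplicates)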
+    unique_cycles = []
+    seen = set()
+    for cycle in cycles:
+        cycle_tuple = tuple(sorted(cycle))
+        if cycle_tuple not in seen:
+            seen.add(cycle_tuple)
+            unique_cycles.append(cycle)
+
+    return unique_cycles
+
+def add_cycle_constraints(prob, y, cycles, added_constraints):
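+    # Two families of lazy constraints per cycle (u, v, w are cycle nodes):
+    #   antisymmetry: y[u,v] + y[v,u] == 1           (exactly one order holds)
+    #   transitivity: y[u,v] + y[v,w] <= 1 + y[u,w]  (u<v and v<w imply u<w)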
+    for cycle in cycles:
+        cycle = list(dict.fromkeys(cycle))
+        for i in range(len(cycle)):
+            for j in range(i + 1, len(cycle)):
+                constraint_1 = ((cycle[i], cycle[j]), (cycle[j], cycle[i]))
+                if constraint_1 not in added_constraints:
+                    # exactly one of the two orderings holds
+                    prob += y[(cycle[i], cycle[j])] + y[(cycle[j], cycle[i])] == 1
+                    added_constraints.add(constraint_1)
+                    
+                for k in range(j + 1, len(cycle)):
+                    if (cycle[i], cycle[j]) in y and (cycle[j], cycle[k]) in y and (cycle[i], cycle[k]) in y:
+                        constraint_2 = ((cycle[i], cycle[j]), (cycle[j], cycle[k]), (cycle[i], cycle[k]))
+                        if constraint_2 not in added_constraints:
+                            # ordering transitivity: i < j and j < k implies i < k
+                            prob += 0 <= y[(cycle[i], cycle[j])] + y[(cycle[j], cycle[k])] <= 1 + y[(cycle[i], cycle[k])]
+                            added_constraints.add(constraint_2)
+        logging.info(f"Constraint hinzugefügt für: {cycle}")
+    logging.info(f"Constraint hinzugefügt für alle bestehenden cycles.")
+
+def solve_bipartite_minimization(graph_file):
+    start_time = time.time()
+    logging.info(f"Prozess für {graph_file} gestartet")
+    base_name = os.path.basename(graph_file)
+    new_base_name = base_name.replace('.gr', '.sol')
+    new_base_name1 = base_name.replace('.gr', '.cros')
+    output_file = os.path.join('mytests/solutions', new_base_name)
+    output_file1 = os.path.join('mytests/crossings', new_base_name1)
+    logging.info(f"Die Ausgabedatei wird {output_file} sein")
+
+    edges = []
+    with open(graph_file, "r") as file:
+        for line in file:
+            if line.startswith('c'):
+                continue
+            elif line.startswith('p'):
+                parts = line.split()
+                n0 = int(parts[2])
+                n1 = int(parts[3])
+                logging.info(f"Größen der Partitionen: A={n0}, B={n1}")
+            else:
+                x, y = map(int, line.split())
+                edges.append((x, y))
+    logging.info(f"{len(edges)} Kanten geladen.")
+
+    prob = LpProblem("Minimize_Crossings", LpMinimize)
+    y = {(i, j): LpVariable(f"y_{i}_{j}", 0, 1, cat='Binary') for i in range(n0 + 1, n0 + n1 + 1) for j in range(n0 + 1, n0 + n1 + 1) if i != j}
+    c = {(i, j, k, l): LpVariable(f"c_{i}_{j}_{k}_{l}", 0, 1, cat='Binary') for (i, j) in edges for (k, l) in edges}
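+    # y[(u, v)] = 1 means that node u of partition B is placed before node v;
+    # c[(i, j, k, l)] = 1 means that the edges (i, j) and (k, l) cross.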
+    logging.info("x, y und c geladen.")
+
+    prob += lpSum(c.values())
+    logging.info("Zielfunktion aufgestellt.")
+
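+    # Link c to y: edges (i, j) and (k, l) with k > i cross exactly when l is
+    # placed before j in the order of B. E.g. for edges (1, 5) and (2, 4) the
+    # model sets c[(1, 5, 2, 4)] == y[(4, 5)]  (hypothetical node numbers).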
+    for (i, j) in edges:
+        for (k, l) in edges:
+            if k > i:
+                if j > l:
+                    prob += c[(i, j, k, l)] == y[(l, j)]
+                elif l > j:
+                    prob += c[(i, j, k, l)] == 1 - y[(j, l)]
+    logging.info("Crossing Constraints aufgestellt.")
+
+    added_constraints = set()
+    iteration = 0
+    max_cycle_length = 9
+
+    while True:
+        iteration += 1
+        prob.solve()
+        logging.info(f"Status der Lösung: {LpStatus[prob.status]}")
+        if prob.status != LpStatusOptimal:
+            logging.warning("Keine optimale Lösung gefunden.")
+            break
+
+        graph = defaultdict(list)
+        in_degree = defaultdict(int)
+        nodes = range(n0 + 1, n0 + n1 + 1)
+        for i in nodes:
+            for j in nodes:
+                if i != j:
+                    y_ij = y.get((i, j))
+                    if y_ij is not None:
+                        if y_ij.varValue == 1:
+                            graph[i].append(j)
+                            in_degree[j] += 1
+                        elif y_ij.varValue == 0:
+                            graph[j].append(i)
+                            in_degree[i] += 1
+
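+        # the y values orient every pair of B-nodes into a tournament; any
+        # directed cycle in it marks an inconsistent ordering to cut off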
+        all_cycles = find_all_cycles(graph, nodes)
+        logging.info(f"Alle gefundenen Zyklen: {len(all_cycles)} Stück.")
+        
+        if not all_cycles:
+            break
+
+        if iteration == 1:
+            initial_cycles = len(all_cycles)
+
+        prioritized_cycles = prioritize_cycles(all_cycles)
+        logging.info(f"Priorisierte Zyklen aufgestellt.")
+
+        # Adjust the percentage dynamically
+        current_cycle_count = len(all_cycles)
+        if current_cycle_count > initial_cycles * 0.75:
+            cycle_percentage = 0.25
+        elif current_cycle_count > initial_cycles * 0.50:
+            cycle_percentage = 0.20
+        elif current_cycle_count > initial_cycles * 0.25:
+            cycle_percentage = 0.15
+        else:
+            cycle_percentage = 0.10
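+        # (heuristic schedule: the more cycles remain relative to the first
+        # iteration, the larger the share of them constrained in this round)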
+
+        # Compute k from the current percentage
+        k = min(int(math.floor(len(prioritized_cycles) * cycle_percentage)), len(prioritized_cycles))
+        cycles_to_process = prioritized_cycles[:k]
+        logging.info(f"Zyklen zur Verarbeitung in dieser Iteration: {cycles_to_process}")
+        logging.info(f"Anzahl der Zyklen zur Verarbeitung in dieser Iteration: {len(cycles_to_process)}")
+        add_cycle_constraints(prob, y, cycles_to_process, added_constraints)
+
+    if prob.status == LpStatusOptimal:
+        logging.info("Optimale Lösung gefunden. Ergebnisse werden gespeichert.")
+
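+        # Kahn's algorithm: repeatedly remove nodes of in-degree zero from the
+        # now acyclic tournament to obtain the final linear order of B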
+        zero_in_degree_queue = deque([i for i in nodes if in_degree[i] == 0])
+        sorted_b = []
+        while zero_in_degree_queue:
+            node = zero_in_degree_queue.popleft()
+            sorted_b.append(node)
+            for neighbor in graph[node]:
+                in_degree[neighbor] -= 1
+                if in_degree[neighbor] == 0:
+                    zero_in_degree_queue.append(neighbor)
+
+        a = count_crossings_via_variables(c)
+        os.makedirs(os.path.dirname(output_file), exist_ok=True)
+        with open(output_file, 'w') as f:
+            for b in sorted_b:
+                f.write(f"{b}\n")
+                print(f"{b}")
+        print("Crossings: ", a) 
+        logging.info(f"Ergebnisse in {output_file} gespeichert")
+
+        a = count_crossings_via_variables(c)
+        os.makedirs(os.path.dirname(output_file1), exist_ok=True)
+        with open(output_file1, 'w') as f:
+            f.write(f"Crossings: {a}\n")     
+        logging.info(f"Crossings: {a}, in {output_file1} gespeichert")
+
+        end_time = time.time()
+        elapsed_time = end_time - start_time
+        elapsed_time_td = timedelta(seconds=elapsed_time)
+        formatted_time = str(elapsed_time_td)
+        formatted_time = formatted_time[:-3] 
+        logging.info(f"Verstrichene Zeit: {formatted_time} (h:m:s.ms) \n")
+    else:
+        logging.warning("Keine optimale Lösung gefunden.")
+        end_time = time.time()
+        elapsed_time = end_time - start_time
+        elapsed_time_td = timedelta(seconds=elapsed_time)
+        formatted_time = str(elapsed_time_td)
+        formatted_time = formatted_time[:-3] 
+        logging.info(f"Verstrichene Zeit: {formatted_time} (h:m:s.ms) \n")
+
+def process_directory(directory_path):
+    for filename in os.listdir(directory_path):
+        if filename.endswith('.gr'):
+            file_path = os.path.join(directory_path, filename)
+            solve_bipartite_minimization(file_path)
+            logging.info(f"Verarbeitung abgeschlossen für {file_path}")
+
+# directory_path = 'githubtests/tiny_test_set/instances/'
+directory_path = 'githubtests/medium_test_set/instances/'
+# directory_path = 'mytests/instances/'
+process_directory(directory_path)
+
+# test_file = 'githubtests/tiny_test_set/instances/complete_4_5.gr'
+# test_file = 'githubtests/medium_test_set/instances/10.gr'
+# test_file = 'mytests/instances/1.gr'
+# solve_bipartite_minimization(test_file)
diff --git a/src/solver.bat b/src/solver.bat
index 0aa979f3bfc376e69e3ae4fca0a3a4f1cb5a20e0..77b1bc63e013c3afb5ef653260df36c9a243e8e0 100644
--- a/src/solver.bat
+++ b/src/solver.bat
@@ -1,4 +1,3 @@
 @echo off
-call .\.venv\Scripts\activate
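+rem assumes python and the PuLP dependency are available on PATH (no venv is activated)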
 python solver_opt.py %*
-call deactivate
+
diff --git a/src/solver_opt.py b/src/solver_opt.py
index 8ce40fad7db8bd74e4e184dc72bdcf0f4c73b58a..c858917488d4fae6a4448972306b7a0e853eb2e4 100644
--- a/src/solver_opt.py
+++ b/src/solver_opt.py
@@ -1,17 +1,8 @@
 import math
-import os
-import logging
-from datetime import timedelta
-import time
-import argparse
+import sys
 from pulp import *
 from collections import defaultdict, deque
 
-# Konfiguriere Logging
-file_handler = logging.FileHandler('logfile.log')
-console_handler = logging.StreamHandler()
-logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s', handlers=[file_handler, console_handler])
-
 def count_crossings_via_variables(c_vars):
     crossings = 0
     for c_var in c_vars.values():
@@ -78,39 +69,26 @@ def add_cycle_constraints(prob, y, cycles, added_constraints):
                             # ordering transitivity: i < j and j < k implies i < k
                             prob += 0 <= y[(cycle[i], cycle[j])] + y[(cycle[j], cycle[k])] <= 1 + y[(cycle[i], cycle[k])]
                             added_constraints.add(constraint_2)
-        logging.info(f"Constraint hinzugefügt für: {cycle}")
-    logging.info(f"Constraint hinzugefügt für alle bestehenden cycles.")
-
-def solve_bipartite_minimization(input_file, output_file):
-    start_time = time()
-    logging.info(f"Prozess für {input_file} gestartet")
-    # Extrahiere den Basisnamen der Eingabedatei
-    base_name = os.path.basename(input_file)
-    # Erstelle den Ausgabepfad
-    logging.info(f"Die Ausgabedatei wird {output_file} sein")
 
+def solve_bipartite_minimization(input_lines):
     edges = []
-    with open(input_file, "r") as file:
-        for line in file:
-            if line.startswith('c'):
-                continue
-            elif line.startswith('p'):
-                parts = line.split()
-                n0 = int(parts[2])
-                n1 = int(parts[3])
-                logging.info(f"Größen der Partitionen: A={n0}, B={n1}")
-            else:
-                x, y = map(int, line.split())
-                edges.append((x, y))
-    logging.info(f"{len(edges)} Kanten geladen.")
+    for line in input_lines:
+        if line.startswith('c'):
+            continue
+        elif line.startswith('p'):
+            parts = line.split()
+            n0 = int(parts[2])
+            n1 = int(parts[3])
+        else:
+            x, y = map(int, line.split())
+            edges.append((x, y))
 
     prob = LpProblem("Minimize_Crossings", LpMinimize)
     y = {(i, j): LpVariable(f"y_{i}_{j}", 0, 1, cat='Binary') for i in range(n0 + 1, n0 + n1 + 1) for j in range(n0 + 1, n0 + n1 + 1) if i != j}
     c = {(i, j, k, l): LpVariable(f"c_{i}_{j}_{k}_{l}", 0, 1, cat='Binary') for (i, j) in edges for (k, l) in edges}
-    logging.info("x, y und c geladen.")
 
     prob += lpSum(c.values())
-    logging.info("Zielfunktion aufgestellt.")
 
     for (i, j) in edges:
         for (k, l) in edges:
@@ -118,8 +96,7 @@ def solve_bipartite_minimization(input_file, output_file):
                 if j > l:
                     prob += c[(i, j, k, l)] == y[(l, j)]
                 elif l > j:
-                    prob += c[(i, j, k, l)] == 1 - y[(j, l)]
-    logging.info("Crossing Constraints aufgestellt.")
+                    prob += c[(i, j, k, l)] == y[(l, j)]
 
     added_constraints = set()
     iteration = 0
@@ -127,10 +104,9 @@ def solve_bipartite_minimization(input_file, output_file):
 
     while True:
         iteration += 1
-        prob.solve()
-        logging.info(f"Status der Lösung: {LpStatus[prob.status]}")
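+        # msg=0 silences CBC so that only the solution is written to stdout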
+        prob.solve(PULP_CBC_CMD(msg=0))
+
         if prob.status != LpStatusOptimal:
-            logging.warning("Keine optimale Lösung gefunden.")
             break
 
         graph = defaultdict(list)
@@ -149,29 +125,23 @@ def solve_bipartite_minimization(input_file, output_file):
                             in_degree[i] += 1
 
         all_cycles = find_all_cycles(graph, nodes)
-        logging.info(f"Alle gefundenen Zyklen: {len(all_cycles)} Stück.")
         
         if not all_cycles:
             break
 
         prioritized_cycles = prioritize_cycles(all_cycles)
-        logging.info(f"Priorisierte Zyklen aufgestellt.")
 
         # Check the length of the longest cycle and choose k accordingly
         if len(prioritized_cycles[-1]) > max_cycle_length:
             k = min(int(math.floor(len(prioritized_cycles)/4)), len(prioritized_cycles))
         else:
             k = len(prioritized_cycles)
         # Take only the first k cycles from the prioritized list
-        #k = min(int(round(len(prioritized_cycles)/4 - iteration)), len(prioritized_cycles))
-        #k = min(max_k, len(prioritized_cycles))
         cycles_to_process = prioritized_cycles[:k]
-        logging.info(f"Zyklen zur Verarbeitung in dieser Iteration: {cycles_to_process}")
-        logging.info(f"Anzahl der Zyklen zur Verarbeitung in dieser Iteration: {len(cycles_to_process)}")
         add_cycle_constraints(prob, y, cycles_to_process, added_constraints)
 
     if prob.status == LpStatusOptimal:
-        logging.info("Optimale Lösung gefunden. Ergebnisse werden gespeichert.")
 
         zero_in_degree_queue = deque([i for i in nodes if in_degree[i] == 0])
         sorted_b = []
@@ -184,36 +154,18 @@ def solve_bipartite_minimization(input_file, output_file):
                     zero_in_degree_queue.append(neighbor)
 
         a = count_crossings_via_variables(c)
-        os.makedirs(os.path.dirname(output_file), exist_ok=True)
-        with open(output_file, 'w') as f:
-            for b in sorted_b:
-                f.write(f"{b}\n")
-                print(f"{b}")
-        print("Crossings: ", a) 
-        logging.info(f"Ergebnisse in {output_file} gespeichert")
-
-        end_time = time()
-        elapsed_time = end_time - start_time
-        elapsed_time_td = timedelta(seconds=elapsed_time)
-        formatted_time = str(elapsed_time_td)
-        formatted_time = formatted_time[:-3] 
-        logging.info(f"Verstrichene Zeit: {formatted_time} (h:m:s.ms) \n")
+        return sorted_b
     else:
-        logging.warning("Keine optimale Lösung gefunden.")
-        end_time = time()
-        elapsed_time = end_time - start_time
-        elapsed_time_td = timedelta(seconds=elapsed_time)
-        formatted_time = str(elapsed_time_td)
-        formatted_time = formatted_time[:-3] 
-        logging.info(f"Verstrichene Zeit: {formatted_time} (h:m:s.ms) \n")
-
+        return None
+
 def main():
-    parser = argparse.ArgumentParser(description="Solve Bipartite Minimization Problem")
-    parser.add_argument('input_file', type=str, help="The path to the input file")
-    parser.add_argument('output_file', type=str, help="The path to the output file")
-    args = parser.parse_args()
-
-    solve_bipartite_minimization(args.input_file, args.output_file)
-
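+    # stdin/stdout protocol: read the whole instance from stdin and print the
+    # computed order of partition B, one node per line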
+    input_lines = sys.stdin.read().strip().split('\n')
+    result = solve_bipartite_minimization(input_lines)
+    
+    if result:
+        print("\n".join(map(str, result)))
+    else:
+        print("No result")
+        
 if __name__ == "__main__":
     main()
diff --git a/src/solver_opt2.py b/src/solver_opt2.py
new file mode 100644
index 0000000000000000000000000000000000000000..fcb3581213ef6292011febee2aee0f51b4629c20
--- /dev/null
+++ b/src/solver_opt2.py
@@ -0,0 +1,238 @@
+import math
+import os
+import logging
+from datetime import timedelta
+import argparse
+from pulp import *
+import time  # imported after pulp's star import so that the 'time' name is not shadowed
+from collections import defaultdict, deque
+
+# Configure logging
+file_handler = logging.FileHandler('logfile.log')
+console_handler = logging.StreamHandler()
+logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s', handlers=[file_handler, console_handler])
+
+def count_crossings_via_variables(c_vars):
+    crossings = 0
+    for c_var in c_vars.values():
+        if c_var.varValue == 1:
+            crossings += 1
+    return crossings
+
+def prioritize_cycles(cycles):
+    # Example: prioritize cycles by length, shortest first
+    cycles.sort(key=len, reverse=False)
+    return cycles
+
+def detect_cycle(graph, node, visited, rec_stack, path, cycles):
+    visited[node] = True
+    rec_stack[node] = True
+    path.append(node)
+
+    for neighbor in graph[node]:
+        if not visited[neighbor]:
+            detect_cycle(graph, neighbor, visited, rec_stack, path, cycles)
+        elif rec_stack[neighbor]:
+            cycle_start = path.index(neighbor)
+            cycles.append(path[cycle_start:].copy())
+
+    rec_stack[node] = False
+    path.pop()
+
+def find_all_cycles(graph, nodes):
+    visited = {node: False for node in nodes}
+    rec_stack = {node: False for node in nodes}
+    path = []
+    cycles = []
+
+    for node in nodes:
+        if not visited[node]:
+            detect_cycle(graph, node, visited, rec_stack, path, cycles)
+
+    # Remove duplicate cycles
+    unique_cycles = []
+    seen = set()
+    for cycle in cycles:
+        cycle_tuple = tuple(sorted(cycle))
+        if cycle_tuple not in seen:
+            seen.add(cycle_tuple)
+            unique_cycles.append(cycle)
+
+    return unique_cycles
+
+def add_cycle_constraints(prob, y, cycles, added_constraints):
+    for cycle in cycles:
+        cycle = list(dict.fromkeys(cycle))
+        for i in range(len(cycle)):
+            for j in range(i + 1, len(cycle)):
+                constraint_1 = ((cycle[i], cycle[j]), (cycle[j], cycle[i]))
+                if constraint_1 not in added_constraints:
+                    # exactly one of the two orderings holds
+                    prob += y[(cycle[i], cycle[j])] + y[(cycle[j], cycle[i])] == 1
+                    added_constraints.add(constraint_1)
+                    
+                for k in range(j + 1, len(cycle)):
+                    if (cycle[i], cycle[j]) in y and (cycle[j], cycle[k]) in y and (cycle[i], cycle[k]) in y:
+                        constraint_2 = ((cycle[i], cycle[j]), (cycle[j], cycle[k]), (cycle[i], cycle[k]))
+                        if constraint_2 not in added_constraints:
+                            # ordering transitivity: i < j and j < k implies i < k
+                            prob += 0 <= y[(cycle[i], cycle[j])] + y[(cycle[j], cycle[k])] <= 1 + y[(cycle[i], cycle[k])]
+                            added_constraints.add(constraint_2)
+        logging.info(f"Constraint hinzugefügt für: {cycle}")
+    logging.info(f"Constraint hinzugefügt für alle bestehenden cycles.")
+
+def solve_bipartite_minimization(input_file, output_file):
+    start_time = time.time()
+    logging.info(f"Prozess für {input_file} gestartet")
+    # The output path is supplied by the caller
+    logging.info(f"Die Ausgabedatei wird {output_file} sein")
+
+    edges = []
+    with open(input_file, "r") as file:
+        for line in file:
+            if line.startswith('c'):
+                continue
+            elif line.startswith('p'):
+                parts = line.split()
+                n0 = int(parts[2])
+                n1 = int(parts[3])
+                logging.info(f"Größen der Partitionen: A={n0}, B={n1}")
+            else:
+                x, y = map(int, line.split())
+                edges.append((x, y))
+    logging.info(f"{len(edges)} Kanten geladen.")
+
+    prob = LpProblem("Minimize_Crossings", LpMinimize)
+    y = {(i, j): LpVariable(f"y_{i}_{j}", 0, 1, cat='Binary') for i in range(n0 + 1, n0 + n1 + 1) for j in range(n0 + 1, n0 + n1 + 1) if i != j}
+    c = {(i, j, k, l): LpVariable(f"c_{i}_{j}_{k}_{l}", 0, 1, cat='Binary') for (i, j) in edges for (k, l) in edges}
+    logging.info("x, y und c geladen.")
+
+    prob += lpSum(c.values())
+    logging.info("Zielfunktion aufgestellt.")
+
+    for (i, j) in edges:
+        for (k, l) in edges:
+            if k > i:
+                if j > l:
+                    prob += c[(i, j, k, l)] == y[(l, j)]
+                elif l > j:
+                    prob += c[(i, j, k, l)] == 1 - y[(j, l)]
+    logging.info("Crossing Constraints aufgestellt.")
+
+    added_constraints = set()
+    iteration = 0
+    max_cycle_length = 9
+
+    while True:
+        iteration += 1
+        prob.solve()
+        logging.info(f"Status der Lösung: {LpStatus[prob.status]}")
+        if prob.status != LpStatusOptimal:
+            logging.warning("Keine optimale Lösung gefunden.")
+            break
+
+        graph = defaultdict(list)
+        in_degree = defaultdict(int)
+        nodes = range(n0 + 1, n0 + n1 + 1)
+        for i in nodes:
+            for j in nodes:
+                if i != j:
+                    y_ij = y.get((i, j))
+                    if y_ij is not None:
+                        if y_ij.varValue == 1:
+                            graph[i].append(j)
+                            in_degree[j] += 1
+                        elif y_ij.varValue == 0:
+                            graph[j].append(i)
+                            in_degree[i] += 1
+
+        all_cycles = find_all_cycles(graph, nodes)
+        logging.info(f"Alle gefundenen Zyklen: {len(all_cycles)} Stück.")
+        
+        if not all_cycles:
+            break
+
+        if iteration == 1:
+            initial_cycles = len(all_cycles)
+
+        prioritized_cycles = prioritize_cycles(all_cycles)
+        logging.info(f"Priorisierte Zyklen aufgestellt.")
+        # Adjust the percentage dynamically
+        current_cycle_count = len(all_cycles)
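+        # If even the longest cycle is short, constrain all cycles at once;
+        # otherwise fall back to the tiered percentage schedule below.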
+        if len(prioritized_cycles[-1]) <= max_cycle_length:
+            k = len(prioritized_cycles)
+        elif current_cycle_count > initial_cycles * 0.75:
+            cycle_percentage = 0.25
+            k = min(int(math.floor(len(prioritized_cycles) * cycle_percentage)), len(prioritized_cycles))
+        elif current_cycle_count > initial_cycles * 0.50:
+            cycle_percentage = 0.20
+            k = min(int(math.floor(len(prioritized_cycles) * cycle_percentage)), len(prioritized_cycles))
+        elif current_cycle_count > initial_cycles * 0.25:
+            cycle_percentage = 0.15
+            k = min(int(math.floor(len(prioritized_cycles) * cycle_percentage)), len(prioritized_cycles))
+        else:
+            cycle_percentage = 0.10
+            k = min(int(math.floor(len(prioritized_cycles) * cycle_percentage)), len(prioritized_cycles))
+
+        # Compute k based on the current percentage
+        #k = min(int(math.floor(len(prioritized_cycles) * cycle_percentage)), len(prioritized_cycles))
+        # Check the length of the longest cycle and adjust k accordingly
+        #if len(prioritized_cycles[-1]) > max_cycle_length:
+        #    k = min(int(math.floor(len(prioritized_cycles)/4)), len(prioritized_cycles))
+        #else:
+        #    k = len(prioritized_cycles)
+        # Take only the first k cycles from the prioritized list
+        #k = min(int(round(len(prioritized_cycles)/4 - iteration)), len(prioritized_cycles))
+        #k = min(max_k, len(prioritized_cycles))
+        cycles_to_process = prioritized_cycles[:k]
+        logging.info(f"Zyklen zur Verarbeitung in dieser Iteration: {cycles_to_process}")
+        logging.info(f"Anzahl der Zyklen zur Verarbeitung in dieser Iteration: {len(cycles_to_process)}")
+        add_cycle_constraints(prob, y, cycles_to_process, added_constraints)
+
+    if prob.status == LpStatusOptimal:
+        logging.info("Optimale Lösung gefunden. Ergebnisse werden gespeichert.")
+
+        zero_in_degree_queue = deque([i for i in nodes if in_degree[i] == 0])
+        sorted_b = []
+        while zero_in_degree_queue:
+            node = zero_in_degree_queue.popleft()
+            sorted_b.append(node)
+            for neighbor in graph[node]:
+                in_degree[neighbor] -= 1
+                if in_degree[neighbor] == 0:
+                    zero_in_degree_queue.append(neighbor)
+
+        a = count_crossings_via_variables(c)
+        os.makedirs(os.path.dirname(output_file), exist_ok=True)
+        with open(output_file, 'w') as f:
+            for b in sorted_b:
+                f.write(f"{b}\n")
+                print(f"{b}")
+        print("Crossings: ", a) 
+        logging.info(f"Ergebnisse in {output_file} gespeichert")
+
+        end_time = time.time()
+        elapsed_time = end_time - start_time
+        elapsed_time_td = timedelta(seconds=elapsed_time)
+        formatted_time = str(elapsed_time_td)
+        formatted_time = formatted_time[:-3] 
+        logging.info(f"Verstrichene Zeit: {formatted_time} (h:m:s.ms) \n")
+    else:
+        logging.warning("Keine optimale Lösung gefunden.")
+        end_time = time.time()
+        elapsed_time = end_time - start_time
+        elapsed_time_td = timedelta(seconds=elapsed_time)
+        formatted_time = str(elapsed_time_td)
+        formatted_time = formatted_time[:-3] 
+        logging.info(f"Verstrichene Zeit: {formatted_time} (h:m:s.ms) \n")
+
+def main():
+    parser = argparse.ArgumentParser(description="Solve Bipartite Minimization Problem")
+    parser.add_argument('input_file', type=str, help="The path to the input file")
+    parser.add_argument('output_file', type=str, help="The path to the output file")
+    args = parser.parse_args()
+
+    solve_bipartite_minimization(args.input_file, args.output_file)
+
+if __name__ == "__main__":
+    main()
diff --git a/src/solver_opt3.py b/src/solver_opt3.py
new file mode 100644
index 0000000000000000000000000000000000000000..e466779f8d67c240b2331218b908d948c773d95e
--- /dev/null
+++ b/src/solver_opt3.py
@@ -0,0 +1,228 @@
+import math
+import os
+import logging
+from datetime import timedelta
+import argparse
+from pulp import *
+import time  # imported after pulp's star import so that the 'time' name is not shadowed
+from collections import defaultdict, deque
+
+# Configure logging
+file_handler = logging.FileHandler('logfile.log')
+console_handler = logging.StreamHandler()
+logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s', handlers=[file_handler, console_handler])
+
+def count_crossings_via_variables(c_vars):
+    crossings = 0
+    for c_var in c_vars.values():
+        if c_var.varValue == 1:
+            crossings += 1
+    return crossings
+
+def prioritize_cycles(cycles):
+    # Example: prioritize cycles by length, shortest first
+    cycles.sort(key=len, reverse=False)
+    return cycles
+
+def detect_cycle(graph, node, visited, rec_stack, path, cycles):
+    visited[node] = True
+    rec_stack[node] = True
+    path.append(node)
+
+    for neighbor in graph[node]:
+        if not visited[neighbor]:
+            detect_cycle(graph, neighbor, visited, rec_stack, path, cycles)
+        elif rec_stack[neighbor]:
+            cycle_start = path.index(neighbor)
+            cycles.append(path[cycle_start:].copy())
+
+    rec_stack[node] = False
+    path.pop()
+
+def find_all_cycles(graph, nodes):
+    visited = {node: False for node in nodes}
+    rec_stack = {node: False for node in nodes}
+    path = []
+    cycles = []
+
+    for node in nodes:
+        if not visited[node]:
+            detect_cycle(graph, node, visited, rec_stack, path, cycles)
+
+    # Remove duplicate cycles
+    unique_cycles = []
+    seen = set()
+    for cycle in cycles:
+        cycle_tuple = tuple(sorted(cycle))
+        if cycle_tuple not in seen:
+            seen.add(cycle_tuple)
+            unique_cycles.append(cycle)
+
+    return unique_cycles
+
+def add_cycle_constraints(prob, y, cycles, added_constraints):
+    for cycle in cycles:
+        cycle = list(dict.fromkeys(cycle))
+        for i in range(len(cycle)):
+            for j in range(i + 1, len(cycle)):
+                constraint_1 = ((cycle[i], cycle[j]), (cycle[j], cycle[i]))
+                if constraint_1 not in added_constraints:
+                    # exactly one of the two orderings holds
+                    prob += y[(cycle[i], cycle[j])] + y[(cycle[j], cycle[i])] == 1
+                    added_constraints.add(constraint_1)
+                    
+                for k in range(j + 1, len(cycle)):
+                    if (cycle[i], cycle[j]) in y and (cycle[j], cycle[k]) in y and (cycle[i], cycle[k]) in y:
+                        constraint_2 = ((cycle[i], cycle[j]), (cycle[j], cycle[k]), (cycle[i], cycle[k]))
+                        if constraint_2 not in added_constraints:
+                            # ordering transitivity: i < j and j < k implies i < k
+                            prob += 0 <= y[(cycle[i], cycle[j])] + y[(cycle[j], cycle[k])] <= 1 + y[(cycle[i], cycle[k])]
+                            added_constraints.add(constraint_2)
+        logging.info(f"Constraint hinzugefügt für: {cycle}")
+    logging.info(f"Constraint hinzugefügt für alle bestehenden cycles.")
+
+def solve_bipartite_minimization(input_file, output_file):
+    start_time = time.time()
+    logging.info(f"Prozess für {input_file} gestartet")
+    # Extract the base name of the input file (currently unused)
+    base_name = os.path.basename(input_file)
+    # The output path is supplied by the caller
+    logging.info(f"Die Ausgabedatei wird {output_file} sein")
+
+    edges = []
+    with open(input_file, "r") as file:
+        for line in file:
+            if line.startswith('c'):
+                continue
+            elif line.startswith('p'):
+                parts = line.split()
+                n0 = int(parts[2])
+                n1 = int(parts[3])
+                logging.info(f"Größen der Partitionen: A={n0}, B={n1}")
+            else:
+                x, y = map(int, line.split())
+                edges.append((x, y))
+    logging.info(f"{len(edges)} Kanten geladen.")
+
+    prob = LpProblem("Minimize_Crossings", LpMinimize)
+    y = {(i, j): LpVariable(f"y_{i}_{j}", 0, 1, cat='Binary') for i in range(n0 + 1, n0 + n1 + 1) for j in range(n0 + 1, n0 + n1 + 1) if i != j}
+    c = {(i, j, k, l): LpVariable(f"c_{i}_{j}_{k}_{l}", 0, 1, cat='Binary') for (i, j) in edges for (k, l) in edges}
+    logging.info("x, y und c geladen.")
+
+    prob += lpSum(c.values())
+    logging.info("Zielfunktion aufgestellt.")
+
+    for (i, j) in edges:
+        for (k, l) in edges:
+            if k > i:
+                if j > l:
+                    prob += c[(i, j, k, l)] == y[(l, j)]
+                elif l > j:
+                    prob += c[(i, j, k, l)] == y[(l, j)]
+    logging.info("Crossing Constraints aufgestellt.")
+
+    added_constraints = set()
+    iteration = 0
+    max_cycle_length = 9
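+    # cycles longer than this count as "long": if any is present, only a
+    # quarter of the (shortest-first) cycle list is constrained per iteration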
+
+    while True:
+        iteration += 1
+        prob.solve()
+        logging.info(f"Status der Lösung: {LpStatus[prob.status]}")
+        if prob.status != LpStatusOptimal:
+            logging.warning("Keine optimale Lösung gefunden.")
+            break
+
+        graph = defaultdict(list)
+        in_degree = defaultdict(int)
+        nodes = range(n0 + 1, n0 + n1 + 1)
+        for i in nodes:
+            for j in nodes:
+                if i != j:
+                    y_ij = y.get((i, j))
+                    if y_ij is not None:
+                        if y_ij.varValue == 1:
+                            graph[i].append(j)
+                            in_degree[j] += 1
+                        elif y_ij.varValue == 0:
+                            graph[j].append(i)
+                            in_degree[i] += 1
+
+        all_cycles = find_all_cycles(graph, nodes)
+        logging.info(f"Alle gefundenen Zyklen: {len(all_cycles)} Stück.")
+        
+        if not all_cycles:
+            break
+
+        prioritized_cycles = prioritize_cycles(all_cycles)
+        logging.info(f"Priorisierte Zyklen aufgestellt.")
+
+        # Check the length of the longest cycle and choose k accordingly
+        if len(prioritized_cycles[-1]) > max_cycle_length:
+            k = min(int(math.floor(len(prioritized_cycles)/4)), len(prioritized_cycles))
+        else:
+            k = len(prioritized_cycles)
+        # Take only the first k cycles from the prioritized list
+        cycles_to_process = prioritized_cycles[:k]
+        logging.info(f"Zyklen zur Verarbeitung in dieser Iteration: {cycles_to_process}")
+        logging.info(f"Anzahl der Zyklen zur Verarbeitung in dieser Iteration: {len(cycles_to_process)}")
+        add_cycle_constraints(prob, y, cycles_to_process, added_constraints)
+
+    if prob.status == LpStatusOptimal:
+        logging.info("Optimale Lösung gefunden. Ergebnisse werden gespeichert.")
+
+        zero_in_degree_queue = deque([i for i in nodes if in_degree[i] == 0])
+        sorted_b = []
+        while zero_in_degree_queue:
+            node = zero_in_degree_queue.popleft()
+            sorted_b.append(node)
+            for neighbor in graph[node]:
+                in_degree[neighbor] -= 1
+                if in_degree[neighbor] == 0:
+                    zero_in_degree_queue.append(neighbor)
+
+        a = count_crossings_via_variables(c)
+        os.makedirs(os.path.dirname(output_file), exist_ok=True)
+        with open(output_file, 'w') as f:
+            for b in sorted_b:
+                f.write(f"{b}\n")
+                print(f"{b}")
+        print("Crossings: ", a) 
+        logging.info(f"Ergebnisse in {output_file} gespeichert")
+
+        end_time = time.time()
+        elapsed_time = end_time - start_time
+        elapsed_time_td = timedelta(seconds=elapsed_time)
+        formatted_time = str(elapsed_time_td)
+        formatted_time = formatted_time[:-3] 
+        logging.info(f"Verstrichene Zeit: {formatted_time} (h:m:s.ms) \n")
+    else:
+        logging.warning("Keine optimale Lösung gefunden.")
+        end_time = time.time()
+        elapsed_time = end_time - start_time
+        elapsed_time_td = timedelta(seconds=elapsed_time)
+        formatted_time = str(elapsed_time_td)
+        formatted_time = formatted_time[:-3] 
+        logging.info(f"Verstrichene Zeit: {formatted_time} (h:m:s.ms) \n")
+
+def main():
+    parser = argparse.ArgumentParser(description="Solve Bipartite Minimization Problem")
+    parser.add_argument('input_file', type=str, nargs='?', help="The path to the input file")
+    parser.add_argument('output_file', type=str, nargs='?', help="The path to the output file")
+    args = parser.parse_args()
+
+    if not args.input_file:
+        print("Error: no input file was given.")
+        return
+
+    input_file = args.input_file
+    output_file = args.output_file if args.output_file else 'default_output.txt'
+
+
+    solve_bipartite_minimization(input_file, output_file)
+
+if __name__ == "__main__":
+    main()