#define _GNU_SOURCE

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <limits.h>

#include "thickenings.h"
#include "coxeter.h"
#include "queue.h"

/* write the word (a sequence of generator indices) into buffer as a
   zero-terminated string over the given alphabet */
char *alphabetize(int *word, int len, const char *alphabet, char *buffer)
{
  int i = 0;
  for(i = 0; i < len; i++)
    buffer[i] = alphabet[word[i]];
  buffer[i] = 0;

  return buffer;
}
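/* Usage sketch for alphabetize() (an illustration, not part of the original
   file; the THICKENINGS_DEMO guard is ours, not an original build flag):
   with the alphabet "abc", the word {0, 2, 1} of length 3 becomes the
   string "acb". The buffer must hold at least len + 1 bytes to make room
   for the terminating zero. */
#ifdef THICKENINGS_DEMO
static void alphabetize_demo(void)
{
  int word[3] = {0, 2, 1};
  char buffer[4];
  fputs(alphabetize(word, 3, "abc", buffer), stdout); /* prints "acb" */
  fputc('\n', stdout);
}
#endif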
/* print one thickening as a row of cells; the ANSI escape sequences give
   the head cell a red background and excluded (negative) cells a white one */
void print_thickening(int rank, int order, const signed char *thickening, int is_fat, int is_slim, int conflict, const char *alphabet, FILE *f)
{
  for(int i = 0; i < order; i++) {
    if(thickening[i] == HEAD_MARKER)
      fprintf(f, "\e[41mx\e[39m\e[49m");
    else if(thickening[i] < 0 && thickening[i] > -10)
      fprintf(f, "\e[47m\e[30m%d\e[39m\e[49m", -thickening[i]);
    else if(thickening[i] <= -10)
      fprintf(f, "\e[47m\e[30m+\e[39m\e[49m");
    else if(thickening[i] > 0 && thickening[i] < 10)
      fprintf(f, "%d", thickening[i]);
    else if(thickening[i] >= 10)
      fprintf(f, "+");
    else
      fprintf(f, " ");
  }

  if(is_fat)
    fprintf(f, " F");
  if(is_slim)
    fprintf(f, " S");
  if(conflict)
    fprintf(f, " C");

  fprintf(f, "\e[K\n");
}

void print_balanced_thickening(int rank, int order, const signed char *thickening, const int *left_invariant, const int *right_invariant, const char *alphabet, FILE *f)
{
  for(int i = 0; i < order; i++) {
    if(thickening[i])
      fprintf(f, "x");
    else
      fprintf(f, "0");
  }

  fprintf(f, " left: ");
  for(int j = 0; j < rank; j++)
    if(left_invariant[j])
      fprintf(f, "%c", alphabet[j]);
    else
      fprintf(f, " ");

  fprintf(f, " right: ");
  for(int j = 0; j < rank; j++)
    if(right_invariant[j])
      fprintf(f, "%c", alphabet[j]);
    else
      fprintf(f, " ");

  fprintf(f, "\n");
}

/* comparator for qsort_r: orders node indices by word length; the node
   array is passed as the extra argument */
static int compare_wordlength(const void *a, const void *b, void *gr)
{
  int i = *((int*)a);
  int j = *((int*)b);
  node_t *graph = (node_t*)gr;

  return graph[i].wordlength - graph[j].wordlength;
}

void prepare_graph(semisimple_type_t type, node_t *graph, edgelist_t **edgelists_pointer, int **words_pointer)
{
  // the edgelists_pointer and words_pointer arguments are just for freeing afterwards
  queue_t queue;
  int rank, order;
  edgelist_t *edge, *previous;
  int edgelist_count, max_wordlength, hyperplane_count;
  int current;
  int *graph_data;
  node_t *graph_unsorted;
  int *wordlength_order, *reverse_wordlength_order, *seen, *words;
  edgelist_t *edgelists;

  // initialize
  rank = coxeter_rank(type);
  order = coxeter_order(type);
  graph_data = (int*)malloc(order*rank*sizeof(int));
  graph_unsorted = (node_t*)malloc(order*sizeof(node_t));
  wordlength_order = (int*)malloc(order*sizeof(int));
  reverse_wordlength_order = (int*)malloc(order*sizeof(int));
  seen = (int*)malloc(order*sizeof(int));

  for(int i = 0; i < order; i++) {
    graph_unsorted[i].left = graph[i].left;
    graph_unsorted[i].right = graph[i].right;
    graph_unsorted[i].word = 0;
    graph_unsorted[i].wordlength = INT_MAX;
    graph_unsorted[i].bruhat_lower = 0;
    graph_unsorted[i].bruhat_higher = 0;
    graph_unsorted[i].is_hyperplane_reflection = 0;
  }

  // get coxeter graph
  generate_coxeter_graph(type, graph_data);
  for(int i = 0; i < order; i++)
    for(int j = 0; j < rank; j++)
      graph_unsorted[i].left[j] = graph_data[i*rank + j];

  // find wordlengths: breadth-first search from the identity (node 0)
  graph_unsorted[0].wordlength = 0;
  queue_init(&queue);
  queue_put(&queue, 0);
  while((current = queue_get(&queue)) != -1) {
    for(int i = 0; i < rank; i++) {
      int neighbor = graph_unsorted[current].left[i];
      if(graph_unsorted[neighbor].wordlength > graph_unsorted[current].wordlength + 1) {
        graph_unsorted[neighbor].wordlength = graph_unsorted[current].wordlength + 1;
        queue_put(&queue, neighbor);
      }
    }
  }

  max_wordlength = 0;
  for(int i = 0; i < order; i++)
    if(graph_unsorted[i].wordlength > max_wordlength)
      max_wordlength = graph_unsorted[i].wordlength;

  // sort by wordlength
  for(int i = 0; i < order; i++)
    wordlength_order[i] = i;
  qsort_r(wordlength_order, order, sizeof(int), compare_wordlength, graph_unsorted);
  // so wordlength_order is a map new index -> old index
  for(int i = 0; i < order; i++)
    reverse_wordlength_order[wordlength_order[i]] = i;
  // reverse_wordlength_order is a map old index -> new index
  for(int i = 0; i < order; i++) {
    graph[i] = graph_unsorted[wordlength_order[i]]; // copy the whole thing
    for(int j = 0; j < rank; j++)
      graph[i].left[j] = reverse_wordlength_order[graph[i].left[j]]; // rewrite references
  }

  // find words
  words = (int*)malloc(order*max_wordlength*sizeof(int));
  memset(words, 0, order*max_wordlength*sizeof(int));
  graph[0].word = &words[0];
  queue_init(&queue);
  queue_put(&queue, 0);
  while((current = queue_get(&queue)) != -1) {
    for(int i = 0; i < rank; i++) {
      int neighbor = graph[current].left[i];
      if(graph[neighbor].wordlength == graph[current].wordlength + 1 && graph[neighbor].word == 0) {
        graph[neighbor].word = &words[neighbor*max_wordlength];
        memcpy(&graph[neighbor].word[1], &graph[current].word[0], graph[current].wordlength*sizeof(int));
        graph[neighbor].word[0] = i;
        queue_put(&queue, neighbor);
      }
    }
  }

  // generate right edges
  for(int i = 0; i < order; i++) {
    for(int j = 0; j < rank; j++) {
      current = graph[0].left[j];
      for(int k = graph[i].wordlength - 1; k >= 0; k--) // apply group element from right to left
        current = graph[current].left[graph[i].word[k]];
      graph[i].right[j] = current;
    }
  }

  // find opposites
  node_t *longest = &graph[order-1];
  for(int i = 0; i < order; i++) {
    current = i;
    for(int k = longest->wordlength - 1; k >= 0; k--)
      current = graph[current].left[longest->word[k]];
    graph[i].opposite = current;
  }
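  /* Worked example (an illustration, not from the original): in type A2 the
     group is the symmetric group S3, of order 6; with generators a and b,
     the longest element is aba = bab, of length 3. The loop above
     left-multiplies every element by this longest element w0, applying its
     word letter by letter from the right, so graph[i].opposite is the node
     of w0 * w_i; in particular, the identity and the longest element are
     opposite to each other. The enumeration below relies on this: whenever
     it marks a node at some level, it marks the node's opposite with the
     negated level, so an element and its opposite are never both included. */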
  // enumerate hyperplanes
  hyperplane_count = 0;
  for(int i = 0; i < order; i++) {
    for(int j = 0; j < rank; j++) {
      current = 0;
      int *word1 = graph[i].word;
      int word1len = graph[i].wordlength;
      int *word2 = graph[graph[i].right[j]].word; // want to calculate word2 * word1^{-1}
      int word2len = graph[graph[i].right[j]].wordlength;
      for(int k = 0; k < word1len; k++) // apply inverse, i.e. go from left to right
        current = graph[current].left[word1[k]];
      for(int k = word2len - 1; k >= 0; k--) // now from right to left
        current = graph[current].left[word2[k]];
      if(graph[current].is_hyperplane_reflection == 0) {
        graph[current].is_hyperplane_reflection = 1;
        hyperplane_count++;
      }
    }
  }

  // generate folding order
  edgelists = (edgelist_t*)malloc(order*hyperplane_count*sizeof(edgelist_t));
  edgelist_count = 0;
  for(int i = 0; i < order; i++) {
    if(graph[i].is_hyperplane_reflection) {
      for(int j = 0; j < order; j++) {
        current = j;
        for(int k = graph[i].wordlength - 1; k >= 0; k--) // apply hyperplane reflection
          current = graph[current].left[graph[i].word[k]];

        if(graph[j].wordlength < graph[current].wordlength) { // current has higher bruhat order than j
          edgelists[edgelist_count].to = j;
          edgelists[edgelist_count].next = graph[current].bruhat_lower;
          graph[current].bruhat_lower = &edgelists[edgelist_count];
          edgelist_count++;
        } else if(graph[j].wordlength > graph[current].wordlength) {
          // j has higher bruhat order than current; these are already included from the other side
        } else {
          ERROR(1, "Chambers of equal word lengths should not be folded on each other!\n");
        }
      }
    }
  }

  // remove redundant edges
  for(int i = 0; i < order; i++) {
    memset(seen, 0, order*sizeof(int));
    for(int len = 1; len <= max_wordlength; len++) {
      // remove all edges originating from i of length len which connect to something already seen using shorter edges
      edge = graph[i].bruhat_lower;
      previous = (edgelist_t*)0;
      while(edge) {
        if(seen[edge->to] && graph[i].wordlength - graph[edge->to].wordlength == len) {
          // fprintf(stderr, "deleting from %d to %d\n", i, edge->to);
          if(previous)
            previous->next = edge->next;
          else
            graph[i].bruhat_lower = edge->next;
        } else {
          previous = edge;
        }
        edge = edge->next;
      }

      // see which nodes we can reach using only edges up to length len, mark them as seen
      queue_init(&queue);
      queue_put(&queue, i);
      seen[i] = 1;
      while((current = queue_get(&queue)) != -1) {
        edge = graph[current].bruhat_lower;
        while(edge) {
          if(!seen[edge->to] && graph[current].wordlength - graph[edge->to].wordlength == len) {
            seen[edge->to] = 1;
            queue_put(&queue, edge->to);
          }
          edge = edge->next;
        }
      }
    }
  }

  // reverse folding order
  for(int i = 0; i < order; i++) {
    edge = graph[i].bruhat_lower;
    while(edge) {
      edgelists[edgelist_count].to = i;
      edgelists[edgelist_count].next = graph[edge->to].bruhat_higher;
      graph[edge->to].bruhat_higher = &edgelists[edgelist_count];
      edgelist_count++;
      edge = edge->next;
    }
  }

  *edgelists_pointer = edgelists;
  *words_pointer = words;

  free(graph_data);
  free(graph_unsorted);
  free(wordlength_order);
  free(reverse_wordlength_order);
  free(seen);
}
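/* A minimal sketch (not part of the original file; the THICKENINGS_DEMO
   guard is ours): walk the pruned bruhat_lower edges downwards from a
   given element, printing it and every element below it in the Bruhat
   order. It assumes prepare_graph() has already been run on `graph` and
   reuses the queue-based traversal pattern from above. */
#ifdef THICKENINGS_DEMO
static void print_bruhat_interval(const node_t *graph, int order, int top, FILE *f)
{
  queue_t queue;
  int current;
  int *seen = (int*)calloc(order, sizeof(int));

  queue_init(&queue);
  queue_put(&queue, top);
  seen[top] = 1;
  while((current = queue_get(&queue)) != -1) {
    fprintf(f, "%d (wordlength %d)\n", current, graph[current].wordlength);
    for(edgelist_t *edge = graph[current].bruhat_lower; edge; edge = edge->next) {
      if(!seen[edge->to]) {
        seen[edge->to] = 1;
        queue_put(&queue, edge->to);
      }
    }
  }

  free(seen);
}
#endif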
void enumerate_balanced_thickenings(semisimple_type_t type, node_t *graph, const char *alphabet, FILE *outfile)
{
  int rank, order;
  signed char *level;
  long count;
  int is_fat, is_slim;
  int current_level, head, current;
  int i;
  edgelist_t *edge;
  queue_t queue;

  rank = coxeter_rank(type);
  order = coxeter_order(type);

  level = (signed char*)malloc(order*sizeof(signed char));
  count = 0;

  memset(level, 0, order*sizeof(signed char));
  current_level = 1;
  head = order - 1;
  level[head] = HEAD_MARKER;

  while(current_level > 0) {
    // calculate transitive closure; that is, fill current_level in every spot which must be marked
    // with the current head (but was not already before), and -current_level in every opposite spot
    // (including opposite to the head)
    is_slim = 1;
    queue_init(&queue);
    level[graph[head].opposite] = -current_level;
    queue_put(&queue, head);
    for(int i = head + 1; i < order && level[i] != HEAD_MARKER; i++) {
      // everything which is right of the head and empty will not get marked in this or higher levels, so we can mark its opposite
      if(level[i] == current_level) {
        is_slim = 0;
        break;
      }
      if(level[i] == 0) {
        level[i] = -current_level;
        level[graph[i].opposite] = current_level;
        queue_put(&queue, graph[i].opposite);
      }
    }

    if(is_slim) {
      while((current = queue_get(&queue)) != -1) {
        edge = graph[current].bruhat_lower;
        while(edge) {
          if(level[edge->to] < 0) {
            is_slim = 0;
            break;
          } else if(level[edge->to] == 0) {
            level[edge->to] = current_level;
            level[graph[edge->to].opposite] = -current_level;
            queue_put(&queue, edge->to);
          }
          edge = edge->next;
        }
      }
    }

    // now we have something, check if it is a balanced thickening
    if(is_slim) {
      is_fat = 1;
      for(int i = 0; i < order; i++) {
        if(level[i] == 0) {
          is_fat = 0;
          break;
        }
      }

      if(is_fat) {
        // ERROR(count >= MAX_THICKENINGS, "Too many balanced thickenings! Increase MAX_THICKENINGS\n");
        // if(count % 10000000 == 0)
        //   print_thickening(rank, order, level, 0, 0, 0, alphabet, stderr);
        fwrite(level, sizeof(signed char), order, outfile);
        count++;
      }
    }

    // now find the next one!
    // try to find an empty spot to the left of "head", but only if it is slim, as otherwise there is no point in adding even more
    if(is_slim) {
      for(i = head - 1; i >= 0; i--)
        if(level[i] == 0)
          break;
      if(i >= 0) {
        head = i;
        level[head] = HEAD_MARKER;
        current_level++;
        ERROR(current_level >= HEAD_MARKER, "HEAD_MARKER too small!\n");
        continue;
      }
    }

    // if none was found, try to move "head" to the left
    while(current_level > 0) {
      for(i = head - 1; i >= 0; i--)
        if(level[i] == 0 || level[i] >= current_level || level[i] <= -current_level)
          break;
      if(i >= 0) { // if this was successful, just move head
        level[head] = 0;
        head = i;
        level[head] = HEAD_MARKER;
        break;
      } else { // if moving the head is not possible, take the next head to the right
        current_level--;
        level[head] = 0;
        do {
          head++;
        } while(head < order && level[head] != HEAD_MARKER);
      }
    }

    // clean up
    for(int i = 0; i < order; i++)
      if((level[i] >= current_level && level[i] != HEAD_MARKER) || level[i] <= -current_level)
        level[i] = 0;
  }

  fprintf(stderr, "\n");
  fprintf(stderr, "Found %ld balanced thickenings\n\n", count);

  free(level);
}
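/* Hypothetical driver (a sketch only; the real entry point presumably
   lives in another file not shown here): shows how prepare_graph() and
   enumerate_balanced_thickenings() fit together. As the initialization
   loop in prepare_graph() requires, the caller provides the per-node
   `left` and `right` arrays of `rank` ints each. The THICKENINGS_DEMO
   guard and the name run_enumeration are ours, not part of the original. */
#ifdef THICKENINGS_DEMO
static void run_enumeration(semisimple_type_t type, const char *alphabet, FILE *outfile)
{
  int rank = coxeter_rank(type);
  int order = coxeter_order(type);
  node_t *graph = (node_t*)malloc(order*sizeof(node_t));
  int *left = (int*)malloc(order*rank*sizeof(int));
  int *right = (int*)malloc(order*rank*sizeof(int));
  edgelist_t *edgelists;
  int *words;

  for(int i = 0; i < order; i++) {
    graph[i].left = &left[i*rank];   // left Cayley edges, filled in by prepare_graph()
    graph[i].right = &right[i*rank]; // right Cayley edges, filled in by prepare_graph()
  }

  prepare_graph(type, graph, &edgelists, &words);
  enumerate_balanced_thickenings(type, graph, alphabet, outfile);

  free(edgelists);
  free(words);
  free(left);
  free(right);
  free(graph);
}
#endif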