// enumerate-balanced-ideals/thickenings.c

#define _GNU_SOURCE
#include <stdio.h>
#include <limits.h>
#include <stdlib.h>
#include <string.h> // memset, memcpy
#include "thickenings.h"
#include "coxeter.h"
#include "queue.h"
char *alphabetize(int *word, int len, const char *alphabet, char *buffer)
{
int i = 0;
for(i = 0; i < len; i++)
buffer[i] = alphabet[word[i]];
buffer[i] = 0;
return buffer;
}
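
// Print one line of the search state to f: the current head is drawn as a red
// 'x', negatively marked elements are drawn on a white background (showing the
// level, or '+' for levels beyond 9), positively marked elements are drawn as a
// plain level digit (or '+'), and unmarked elements as a blank. The fat, slim
// and conflict flags are appended as F, S and C.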
void print_thickening(int rank, int order, const signed char *thickening, int is_fat, int is_slim, int conflict, const char *alphabet, FILE *f)
{
for(int i = 0; i < order; i++) {
if(thickening[i] == HEAD_MARKER)
fprintf(f, "\e[41mx\e[39m\e[49m");
else if(thickening[i] < 0 && thickening[i] > -10)
fprintf(f, "\e[47m\e[30m%d\e[39m\e[49m", -thickening[i]);
else if(thickening[i] <= -10)
fprintf(f, "\e[47m\e[30m+\e[39m\e[49m");
else if(thickening[i] > 0 && thickening[i] < 10)
fprintf(f, "%d", thickening[i]);
else if(thickening[i] >= 10)
fprintf(f, "+");
else
fprintf(f, " ");
}
if(is_fat)
fprintf(f, " F");
if(is_slim)
fprintf(f, " S");
if(conflict)
fprintf(f, " C");
fprintf(f, "\e[K\n");
}
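
// Print a balanced thickening as an x/0 pattern over the group elements,
// followed by the simple generators under which it is invariant from the left
// and from the right.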
void print_balanced_thickening(int rank, int order, const signed char *thickening, const int *left_invariant, const int *right_invariant, const char *alphabet, FILE *f)
{
for(int i = 0; i < order; i++) {
if(thickening[i])
fprintf(f, "x");
else
fprintf(f, "0");
}

fprintf(f, " left: ");
for(int j = 0; j < rank; j++)
if(left_invariant[j])
fprintf(f, "%c", alphabet[j]);
else
fprintf(f, " ");

fprintf(f, " right: ");
for(int j = 0; j < rank; j++)
if(right_invariant[j])
fprintf(f, "%c", alphabet[j]);
else
fprintf(f, " ");

fprintf(f, "\n");
}
static int compare_wordlength(const void *a, const void *b, void *gr)
{
int i = *((int*)a);
int j = *((int*)b);
node_t *graph = (node_t*)gr;
return graph[i].wordlength - graph[j].wordlength;
}
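
// Fill in the remaining fields of the Coxeter group graph: left and right
// multiplication tables, word lengths and reduced words, the opposite element
// map, and the Bruhat order edges (bruhat_lower/bruhat_higher) obtained by
// folding across hyperplanes. Nodes are reordered by increasing word length.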
void prepare_graph(semisimple_type_t type, node_t *graph, edgelist_t **edgelists_pointer, int **words_pointer) // the edgelists_pointer and words_pointer arguments are just for freeing afterwards
{
queue_t queue;
int rank, order;
edgelist_t *edge, *previous;
int edgelist_count, max_wordlength, hyperplane_count;
int current;
int *graph_data;
node_t *graph_unsorted;
int *wordlength_order, *reverse_wordlength_order, *seen, *words;
edgelist_t *edgelists;
// initialize
rank = coxeter_rank(type);
order = coxeter_order(type);
graph_data = (int*)malloc(order*rank*sizeof(int));
graph_unsorted = (node_t*)malloc(order*sizeof(node_t));
wordlength_order = (int*)malloc(order*sizeof(int));
reverse_wordlength_order = (int*)malloc(order*sizeof(int));
seen = (int*)malloc(order*sizeof(int));
for(int i = 0; i < order; i++) {
graph_unsorted[i].left = graph[i].left;
graph_unsorted[i].right = graph[i].right;
graph_unsorted[i].word = 0;
graph_unsorted[i].wordlength = INT_MAX;
graph_unsorted[i].bruhat_lower = 0;
graph_unsorted[i].bruhat_higher = 0;
graph_unsorted[i].is_hyperplane_reflection = 0;
}
// get coxeter graph
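// generate_coxeter_graph fills graph_data so that graph_data[i*rank + j] is the
// neighbour of element i under generator j; it becomes the `left` table below.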
generate_coxeter_graph(type, graph_data);
for(int i = 0; i < order; i++)
for(int j = 0; j < rank; j++)
graph_unsorted[i].left[j] = graph_data[i*rank + j];
// find wordlengths
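// breadth-first search from the identity, which is element 0 of the unsorted graph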
graph_unsorted[0].wordlength = 0;
queue_init(&queue);
queue_put(&queue, 0);
while((current = queue_get(&queue)) != -1) {
for(int i = 0; i < rank; i++) {
int neighbor = graph_unsorted[current].left[i];
if(graph_unsorted[neighbor].wordlength > graph_unsorted[current].wordlength + 1) {
graph_unsorted[neighbor].wordlength = graph_unsorted[current].wordlength + 1;
queue_put(&queue, neighbor);
}
}
}
max_wordlength = 0;
for(int i = 0; i < order; i++)
if(graph_unsorted[i].wordlength > max_wordlength)
max_wordlength = graph_unsorted[i].wordlength;
// sort by wordlength
for(int i = 0; i < order; i++)
wordlength_order[i] = i;
qsort_r(wordlength_order, order, sizeof(int), compare_wordlength, graph_unsorted); // so wordlength_order is a map new index -> old index
for(int i = 0; i < order; i++)
reverse_wordlength_order[wordlength_order[i]] = i; // reverse_wordlength_order is a map old index -> new index
for(int i = 0; i < order; i++) {
graph[i] = graph_unsorted[wordlength_order[i]]; // copy the whole thing
for(int j = 0; j < rank; j++)
graph[i].left[j] = reverse_wordlength_order[graph[i].left[j]]; // rewrite references
}
// find words
words = (int*)malloc(order*max_wordlength*sizeof(int));
memset(words, 0, order*max_wordlength*sizeof(int));
graph[0].word = &words[0];
queue_init(&queue);
queue_put(&queue, 0);
while((current = queue_get(&queue)) != -1) {
for(int i = 0; i < rank; i++) {
int neighbor = graph[current].left[i];
if(graph[neighbor].wordlength == graph[current].wordlength + 1 && graph[neighbor].word == 0) {
graph[neighbor].word = &words[neighbor*max_wordlength];
memcpy(&graph[neighbor].word[1], &graph[current].word[0], graph[current].wordlength*sizeof(int));
graph[neighbor].word[0] = i;
queue_put(&queue, neighbor);
}
}
}
// generate right edges
for(int i = 0; i < order; i++) {
for(int j = 0; j < rank; j++) {
current = graph[0].left[j];
for(int k = graph[i].wordlength - 1; k >= 0; k--) { // apply group element from right to left
current = graph[current].left[graph[i].word[k]];
}
graph[i].right[j] = current;
}
}
// find opposites
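// The opposite of an element is obtained by applying the word of the longest
// element w0, which is the last node once the graph is sorted by word length.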
node_t *longest = &graph[order-1];
for(int i = 0; i < order; i++) {
current = i;
for(int k = longest->wordlength - 1; k >= 0; k--)
current = graph[current].left[longest->word[k]];
graph[i].opposite = current;
}
// enumerate hyperplanes
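// For every element w and every generator s, the product (w*s)*w^{-1} is the
// reflection across the wall between the chambers w and w*s; marking each such
// element once enumerates all hyperplane reflections.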
hyperplane_count = 0;
for(int i = 0; i < order; i++) {
for(int j = 0; j < rank; j++) {
current = 0;
int *word1 = graph[i].word;
int word1len = graph[i].wordlength;
int *word2 = graph[graph[i].right[j]].word; // want to calculate word2 * word1^{-1}
int word2len = graph[graph[i].right[j]].wordlength;
for(int k = 0; k < word1len; k++) // apply inverse, i.e. go from left to right
current = graph[current].left[word1[k]];
for(int k = word2len - 1; k >= 0; k--) // now from right to left
current = graph[current].left[word2[k]];
if(graph[current].is_hyperplane_reflection == 0) {
graph[current].is_hyperplane_reflection = 1;
hyperplane_count++;
}
}
}
// generate folding order
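// For each hyperplane reflection r and each element j, fold j across r and
// record an edge from the image with the longer word down to the one with the
// shorter word; these edges generate the Bruhat order used below.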
edgelists = (edgelist_t*)malloc(order*hyperplane_count*sizeof(edgelist_t));
edgelist_count = 0;
for(int i = 0; i < order; i++) {
if(graph[i].is_hyperplane_reflection) {
for(int j = 0; j < order; j++) {
current = j;
for(int k = graph[i].wordlength - 1; k >= 0; k--) // apply hyperplane reflection
current = graph[current].left[graph[i].word[k]];
if(graph[j].wordlength < graph[current].wordlength) { // current has higher bruhat order than j
edgelists[edgelist_count].to = j;
edgelists[edgelist_count].next = graph[current].bruhat_lower;
graph[current].bruhat_lower = &edgelists[edgelist_count];
edgelist_count++;
} else if(graph[j].wordlength > graph[current].wordlength) { // j has higher bruhat order than current; these are already included from the other side
} else {
ERROR(1, "Chambers of equal word lengths should not be folded on each other!\n");
}
}
}
}
// remove redundant edges
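// i.e. keep, for every node, only the edges that are not already implied by a
// chain of shorter edges (a transitive reduction of the order generated above)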
for(int i = 0; i < order; i++) {
memset(seen, 0, order*sizeof(int));
for(int len = 1; len <= max_wordlength; len++) {
// remove all edges originating from i of length len which connect to something already seen using shorter edges
edge = graph[i].bruhat_lower;
previous = (edgelist_t*)0;
while(edge) {
if(seen[edge->to] && graph[i].wordlength - graph[edge->to].wordlength == len) {
// fprintf(stderr, "deleting from %d to %d\n", i, edge->to);
if(previous)
previous->next = edge->next;
else
graph[i].bruhat_lower = edge->next;
} else {
previous = edge;
}
edge = edge->next;
}
// see which nodes we can reach using only edges up to length len, mark them as seen
queue_init(&queue);
queue_put(&queue, i);
seen[i] = 1;
while((current = queue_get(&queue)) != -1) {
edge = graph[current].bruhat_lower;
while(edge) {
if(!seen[edge->to] && graph[current].wordlength - graph[edge->to].wordlength == len) {
seen[edge->to] = 1;
queue_put(&queue, edge->to);
}
edge = edge->next;
}
}
}
}
// reverse folding order
for(int i = 0; i < order; i++) {
edge = graph[i].bruhat_lower;
while(edge) {
edgelists[edgelist_count].to = i;
edgelists[edgelist_count].next = graph[edge->to].bruhat_higher;
graph[edge->to].bruhat_higher = &edgelists[edgelist_count];
edgelist_count++;
edge = edge->next;
}
}
*edgelists_pointer = edgelists;
*words_pointer = words;
free(graph_data);
free(graph_unsorted);
free(wordlength_order);
free(reverse_wordlength_order);
free(seen);
}
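
// Enumerate all balanced thickenings, i.e. ideals in the Bruhat order which are
// both slim (no element is contained together with its opposite) and fat (every
// element or its opposite is contained), using a backtracking search over the
// elements ordered by word length. One example is printed to stderr every 10^7
// thickenings found; the total count is reported at the end.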
void enumerate_balanced_thickenings(semisimple_type_t type, node_t *graph, const char *alphabet, FILE *outfile)
{
int rank, order;
signed char *level;
long count;
int is_fat, is_slim;
int current_level, head, current;
int i;
edgelist_t *edge;
queue_t queue;
rank = coxeter_rank(type);
order = coxeter_order(type);
level = (signed char*)malloc(order*sizeof(signed char));
count = 0;
memset(level, 0, order*sizeof(signed char));
current_level = 1;
head = order - 1;
level[head] = HEAD_MARKER;
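// Encoding of `level`: HEAD_MARKER marks the heads of the search (one per
// level), a positive entry k means the element was added to the thickening at
// level k, a negative entry -k means its opposite was added at level k, and 0
// means the element is still undecided.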
while(current_level > 0) {
// calculate the transitive closure; that is, write current_level into every spot which has to be marked together with the current head (but was not marked before), and -current_level into every opposite spot (including the spot opposite to the head)
is_slim = 1;
queue_init(&queue);
level[graph[head].opposite] = -current_level;
queue_put(&queue, head);
for(int i = head + 1; i < order && level[i] != HEAD_MARKER; i++) { // everything to the right of the head which is still empty will not get marked at this or any higher level, so we can mark its opposite; check i < order first to avoid reading past the end
if(level[i] == current_level) {
is_slim = 0;
break;
} else if(level[i] == 0) {
level[i] = -current_level;
level[graph[i].opposite] = current_level;
queue_put(&queue, graph[i].opposite);
}
}
if(is_slim) {
while((current = queue_get(&queue)) != -1) {
edge = graph[current].bruhat_lower;
while(edge) {
if(level[edge->to] < 0) {
is_slim = 0;
break;
} else if(level[edge->to] == 0) {
level[edge->to] = current_level;
level[graph[edge->to].opposite] = -current_level;
queue_put(&queue, edge->to);
}
edge = edge->next;
}
}
}
// now we have something, check if it is a balanced thickening
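// fat means that no element is left unmarked, i.e. every element or its
// opposite belongs to the thickening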
if(is_slim) {
is_fat = 1;
for(int i = 0; i < order; i++) {
if(level[i] == 0) {
is_fat = 0;
break;
}
}
if(is_fat) {
// ERROR(count >= MAX_THICKENINGS, "Too many balanced thickenings! Increase MAX_THICKENINGS\n");
if(count % 10000000 == 0)
print_thickening(rank, order, level, 0, 0, 0, alphabet, stderr);
// fwrite(level, sizeof(signed char), order, outfile);
count++;
}
}

// now find the next one!
// try to find an empty spot to the left of "head", but only if the current set is slim, as otherwise there is no point in adding even more
if(is_slim) {
for(i = head - 1; i >= 0; i--)
if(level[i] == 0)
break;
if(i >= 0) {
head = i;
level[head] = HEAD_MARKER;
current_level++;
ERROR(current_level >= HEAD_MARKER, "HEAD_MARKER too small!\n");
continue;
}
}
// if none was found, try to move "head" to the left
while(current_level > 0) {
for(i = head - 1; i >= 0; i--)
if(level[i] == 0 || level[i] >= current_level || level[i] <= -current_level)
break;
if(i >= 0) { // if this was successful, just move head
level[head] = 0;
head = i;
level[head] = HEAD_MARKER;
break;
} else { // if moving the head is not possible, take the next head to the right
current_level--;
level[head] = 0;
do {
head++;
} while(head < order && level[head] != HEAD_MARKER);
}
}
// clean up
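// erase every mark made at the current level or deeper, keeping the remaining
// head markers, so that the state matches the new (shallower) search position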
for(int i = 0; i < order; i++)
if((level[i] >= current_level && level[i] != HEAD_MARKER) || level[i] <= -current_level)
level[i] = 0;
}
fprintf(stderr, "\n");
fprintf(stderr, "Found %ld balanced thickenings\n\n", count);
free(level);
}