/* 2016-06-09 19:11:20 +00:00 */
|
|
|
#define _GNU_SOURCE
|
|
|
|
|
|
|
|
#include <stdio.h>
|
|
|
|
#include <limits.h>
|
|
|
|
#include <stdlib.h>
|
|
|
|
#include <malloc.h>
|
|
|
|
#include <memory.h>
|
|
|
|
|
|
|
|
#include "coxeter.h"
|
|
|
|
#include "queue.h"
|
|
|
|
|
|
|
|
#define DEBUG(msg, ...) do{fprintf(stderr, msg, ##__VA_ARGS__); }while(0)
|
|
|
|
|
/* 2016-06-20 08:37:21 +00:00 */
|
|
|
#define MAX_THICKENINGS 10000
|
|
|
|
|
/* 2016-06-09 19:11:20 +00:00 */
|
|
|
// Singly linked adjacency-list cell. Used to record Bruhat order edges
// between group elements; "to" is an index into the node array.
typedef struct _edgelist {
	int to;                 // index of the node this edge points to
	struct _edgelist *next; // next edge in the list, or NULL at the end
} edgelist_t;
|
|
|
|
|
|
|
|
// One element of the Coxeter group / one chamber of the Cayley graph.
typedef struct {
	int *word;       // reduced word as generator indices; length is wordlength
	int wordlength;  // length of the reduced word (graph distance from the identity)
	int *left;       // left[j]: neighbor under generator j, filled from generate_coxeter_graph
	                 // (presumably left multiplication — confirm against coxeter.h)
	int *right;      // right[j]: neighbor under multiplication by generator j on the other
	                 // side; computed in main from left[] and the reduced word
	int opposite;    // index of the element obtained by applying the longest element's word
	edgelist_t *bruhat_lower;  // edges to elements directly below in the (folding) Bruhat order
	edgelist_t *bruhat_higher; // reverse edges: elements directly above
	int is_hyperplane_reflection; // boolean value
} node_t;
|
|
|
|
|
|
|
|
/*
 * Render a word (sequence of generator indices) as a NUL-terminated string,
 * mapping index k to alphabet[k]. buffer must have room for len+1 chars.
 * Returns buffer so the call can be used directly inside printf arguments.
 */
static char *alphabetize(int *word, int len, const char *alphabet, char *buffer)
{
	char *out = buffer;

	for(int k = 0; k < len; k++)
		*out++ = alphabet[word[k]];
	*out = '\0';

	return buffer;
}
|
|
|
|
|
/* 2016-06-20 08:37:21 +00:00 */
|
|
|
/*
 * Print one balanced thickening to stdout as a 0/x bitmap over all "order"
 * group elements, followed by the generators (as letters) under which the
 * thickening is left invariant and right invariant.
 */
static void print_balanced_thickening(int rank, int order, const int *thickening, const int *left_invariant, const int *right_invariant, const char *alphabet)
{
	for(int k = 0; k < order; k++)
		putchar(thickening[k] ? 'x' : '0');

	printf(" left: ");
	for(int k = 0; k < rank; k++)
		putchar(left_invariant[k] ? alphabet[k] : ' ');

	printf(" right: ");
	for(int k = 0; k < rank; k++)
		putchar(right_invariant[k] ? alphabet[k] : ' ');

	printf("\n");
}
|
|
|
|
|
/* 2016-06-09 19:11:20 +00:00 */
|
|
|
static int compare_wordlength(const void *a, const void *b, void *gr)
|
|
|
|
{
|
|
|
|
int i = *((int*)a);
|
|
|
|
int j = *((int*)b);
|
|
|
|
node_t *graph = (node_t*)gr;
|
|
|
|
|
|
|
|
return graph[i].wordlength - graph[j].wordlength;
|
|
|
|
}
|
|
|
|
|
|
|
|
int main(int argc, const char *argv[])
|
|
|
|
{
|
|
|
|
queue_t queue;
|
|
|
|
|
|
|
|
// heap stuff
|
|
|
|
node_t *graph, *graph_unsorted;
|
|
|
|
int *graph_data;
|
|
|
|
int *wordlength_order, *reverse_wordlength_order, *seen, *level;
|
|
|
|
int *words;
|
|
|
|
edgelist_t *edgelists;
|
|
|
|
int *left, *right;
|
|
|
|
int *left_invariant, *right_invariant;
|
|
|
|
|
|
|
|
edgelist_t *edge, *previous;
|
|
|
|
int rank, order;
|
|
|
|
semisimple_type_t type;
|
|
|
|
int edgelist_count, hyperplane_count, max_wordlength;
|
|
|
|
int current, head, i, current_level;
|
|
|
|
int is_fat, is_slim;
|
|
|
|
int thickenings_count, fat_count, slim_count, balanced_count;
|
|
|
|
|
2016-06-20 08:37:21 +00:00
|
|
|
int *balanced_thickenings;
|
|
|
|
|
2016-06-09 19:11:20 +00:00
|
|
|
char *string_buffer1, *string_buffer2;
|
|
|
|
|
|
|
|
const char *alphabet = "abcdefghijklmnopqrstuvwxyz";
|
|
|
|
|
|
|
|
ERROR(argc < 2, "Too few arguments!\n");
|
|
|
|
|
|
|
|
type.n = argc - 1;
|
|
|
|
type.factors = (simple_type_t*)malloc((argc-1)*sizeof(simple_type_t));
|
|
|
|
for(int i = 0; i < argc - 1; i++) {
|
|
|
|
type.factors[i].series = argv[i+1][0];
|
|
|
|
type.factors[i].rank = argv[i+1][1] - '0';
|
|
|
|
ERROR(argv[i+1][0] < 'A' || argv[i+1][0] > 'I' || argv[i+1][1] < '1' || argv[i+1][1] > '9', "Arguments must be Xn with X out of A-I and n out of 0-9\n");
|
|
|
|
}
|
|
|
|
|
|
|
|
rank = coxeter_rank(type);
|
|
|
|
order = coxeter_order(type);
|
|
|
|
|
|
|
|
ERROR(strlen(alphabet) < rank, "The alphabet has too few letters\n");
|
|
|
|
|
2016-06-20 08:37:21 +00:00
|
|
|
// DEBUG("The group has rank %d and order %d\n", rank, order);
|
2016-06-09 19:11:20 +00:00
|
|
|
|
|
|
|
graph = (node_t*)malloc(order*sizeof(node_t));
|
|
|
|
graph_unsorted = (node_t*)malloc(order*sizeof(node_t));
|
|
|
|
graph_data = (int*)malloc(order*rank*sizeof(int));
|
|
|
|
wordlength_order = (int*)malloc(order*sizeof(int));
|
|
|
|
reverse_wordlength_order = (int*)malloc(order*sizeof(int));
|
|
|
|
seen = (int*)malloc(order*sizeof(int));
|
|
|
|
level = (int*)malloc(order*sizeof(int));
|
|
|
|
left = (int*)malloc(order*rank*sizeof(int));
|
|
|
|
right = (int*)malloc(order*rank*sizeof(int));
|
|
|
|
left_invariant = (int*)malloc(rank*sizeof(int));
|
|
|
|
right_invariant = (int*)malloc(rank*sizeof(int));
|
2016-06-20 08:37:21 +00:00
|
|
|
balanced_thickenings = (int*)malloc(MAX_THICKENINGS*order*sizeof(int));
|
2016-06-09 19:11:20 +00:00
|
|
|
|
2016-06-20 08:37:21 +00:00
|
|
|
// DEBUG("Generate Cayley graph\n");
|
2016-06-09 19:11:20 +00:00
|
|
|
|
|
|
|
generate_coxeter_graph(type, graph_data);
|
|
|
|
|
|
|
|
for(int i = 0; i < order; i++) {
|
|
|
|
graph_unsorted[i].left = &left[i*rank];
|
|
|
|
graph_unsorted[i].right = &right[i*rank];
|
|
|
|
for(int j = 0; j < rank; j++)
|
|
|
|
graph_unsorted[i].left[j] = graph_data[i*rank + j];
|
|
|
|
graph_unsorted[i].word = 0;
|
|
|
|
graph_unsorted[i].wordlength = INT_MAX;
|
|
|
|
graph_unsorted[i].bruhat_lower = 0;
|
|
|
|
graph_unsorted[i].bruhat_higher = 0;
|
|
|
|
graph_unsorted[i].is_hyperplane_reflection = 0;
|
|
|
|
}
|
|
|
|
|
2016-06-20 08:37:21 +00:00
|
|
|
// DEBUG("Find wordlengths\n");
|
2016-06-09 19:11:20 +00:00
|
|
|
|
|
|
|
graph_unsorted[0].wordlength = 0;
|
|
|
|
queue_init(&queue);
|
|
|
|
queue_put(&queue, 0);
|
|
|
|
while((current = queue_get(&queue)) != -1) {
|
|
|
|
for(int i = 0; i < rank; i++) {
|
|
|
|
int neighbor = graph_unsorted[current].left[i];
|
|
|
|
if(graph_unsorted[neighbor].wordlength > graph_unsorted[current].wordlength + 1) {
|
|
|
|
graph_unsorted[neighbor].wordlength = graph_unsorted[current].wordlength + 1;
|
|
|
|
queue_put(&queue, neighbor);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
max_wordlength = 0;
|
|
|
|
for(int i = 0; i < order; i++)
|
|
|
|
if(graph_unsorted[i].wordlength > max_wordlength)
|
|
|
|
max_wordlength = graph_unsorted[i].wordlength;
|
|
|
|
|
|
|
|
string_buffer1 = (char*)malloc((max_wordlength+1)*sizeof(char));
|
|
|
|
string_buffer2 = (char*)malloc((max_wordlength+1)*sizeof(char));
|
|
|
|
|
2016-06-20 08:37:21 +00:00
|
|
|
// DEBUG("Sort by wordlength\n");
|
2016-06-09 19:11:20 +00:00
|
|
|
|
|
|
|
for(int i = 0; i < order; i++)
|
|
|
|
wordlength_order[i] = i;
|
|
|
|
qsort_r(wordlength_order, order, sizeof(int), compare_wordlength, graph_unsorted); // so wordlength_order is a map new index -> old index
|
|
|
|
for(int i = 0; i < order; i++)
|
|
|
|
reverse_wordlength_order[wordlength_order[i]] = i; // reverse_wordlength_order is a map old index -> new index
|
|
|
|
for(int i = 0; i < order; i++) {
|
|
|
|
graph[i] = graph_unsorted[wordlength_order[i]]; // copy the whole thing
|
|
|
|
for(int j = 0; j < rank; j++)
|
|
|
|
graph[i].left[j] = reverse_wordlength_order[graph[i].left[j]]; // rewrite references
|
|
|
|
}
|
|
|
|
|
2016-06-20 08:37:21 +00:00
|
|
|
// DEBUG("Find words\n");
|
2016-06-09 19:11:20 +00:00
|
|
|
|
|
|
|
words = (int*)malloc(order*max_wordlength*sizeof(int));
|
|
|
|
memset(words, 0, order*max_wordlength*sizeof(int));
|
|
|
|
graph[0].word = &words[0];
|
|
|
|
queue_init(&queue);
|
|
|
|
queue_put(&queue, 0);
|
|
|
|
while((current = queue_get(&queue)) != -1) {
|
|
|
|
for(int i = 0; i < rank; i++) {
|
|
|
|
int neighbor = graph[current].left[i];
|
|
|
|
if(graph[neighbor].wordlength == graph[current].wordlength + 1 && graph[neighbor].word == 0) {
|
|
|
|
graph[neighbor].word = &words[neighbor*max_wordlength];
|
|
|
|
memcpy(&graph[neighbor].word[1], &graph[current].word[0], graph[current].wordlength*sizeof(int));
|
|
|
|
graph[neighbor].word[0] = i;
|
|
|
|
queue_put(&queue, neighbor);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-06-20 08:37:21 +00:00
|
|
|
// DEBUG("Generate right edges\n");
|
2016-06-09 19:11:20 +00:00
|
|
|
|
|
|
|
for(int i = 0; i < order; i++) {
|
|
|
|
for(int j = 0; j < rank; j++) {
|
|
|
|
current = graph[0].left[j];
|
|
|
|
for(int k = graph[i].wordlength - 1; k >= 0; k--) { // apply group element from right to left
|
|
|
|
current = graph[current].left[graph[i].word[k]];
|
|
|
|
}
|
|
|
|
graph[i].right[j] = current;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-06-20 08:37:21 +00:00
|
|
|
// DEBUG("Find opposites\n");
|
2016-06-09 19:11:20 +00:00
|
|
|
|
|
|
|
node_t *longest = &graph[order-1];
|
|
|
|
for(int i = 0; i < order; i++) {
|
|
|
|
current = i;
|
|
|
|
for(int k = longest->wordlength - 1; k >= 0; k--)
|
|
|
|
current = graph[current].left[longest->word[k]];
|
|
|
|
graph[i].opposite = current;
|
|
|
|
}
|
|
|
|
|
2016-06-20 08:37:21 +00:00
|
|
|
// DEBUG("Enumerate hyperplanes\n"); // every right edge is a reflection along a hyperplane; calculate what this reflection does to the identity
|
2016-06-09 19:11:20 +00:00
|
|
|
|
|
|
|
hyperplane_count = 0;
|
|
|
|
for(int i = 0; i < order; i++) {
|
|
|
|
for(int j = 0; j < rank; j++) {
|
|
|
|
current = 0;
|
|
|
|
int *word1 = graph[i].word;
|
|
|
|
int word1len = graph[i].wordlength;
|
|
|
|
int *word2 = graph[graph[i].right[j]].word; // want to calculate word2 * word1^{-1}
|
|
|
|
int word2len = graph[graph[i].right[j]].wordlength;
|
|
|
|
for(int k = 0; k < word1len; k++) // apply inverse, i.e. go from left to right
|
|
|
|
current = graph[current].left[word1[k]];
|
|
|
|
for(int k = word2len - 1; k >= 0; k--) // now from right to left
|
|
|
|
current = graph[current].left[word2[k]];
|
|
|
|
if(graph[current].is_hyperplane_reflection == 0) {
|
|
|
|
graph[current].is_hyperplane_reflection = 1;
|
|
|
|
hyperplane_count++;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-06-20 08:37:21 +00:00
|
|
|
// DEBUG("The Weyl chambers are bounded by %d hyperplanes\n", hyperplane_count);
|
|
|
|
// DEBUG("Generate folding order\n");
|
2016-06-09 19:11:20 +00:00
|
|
|
|
|
|
|
edgelists = (edgelist_t*)malloc(order*hyperplane_count*sizeof(edgelist_t));
|
|
|
|
for(int i = 0; i < order; i++) {
|
|
|
|
if(graph[i].is_hyperplane_reflection) {
|
|
|
|
for(int j = 0; j < order; j++) {
|
|
|
|
|
|
|
|
current = j;
|
|
|
|
for(int k = graph[i].wordlength - 1; k >= 0; k--) // apply hyperplane reflection
|
|
|
|
current = graph[current].left[graph[i].word[k]];
|
|
|
|
|
|
|
|
if(graph[j].wordlength < graph[current].wordlength) { // current has higher bruhat order than j
|
|
|
|
edgelists[edgelist_count].to = j;
|
|
|
|
edgelists[edgelist_count].next = graph[current].bruhat_lower;
|
|
|
|
graph[current].bruhat_lower = &edgelists[edgelist_count];
|
|
|
|
edgelist_count++;
|
|
|
|
} else if(graph[j].wordlength > graph[current].wordlength) { // j has higher bruhat order than current; these are already included from the other side
|
|
|
|
} else {
|
|
|
|
ERROR(1, "Chambers of equal word lengths should not be folded on each other!\n");
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-06-20 08:37:21 +00:00
|
|
|
// DEBUG("Remove redundant edges\n");
|
2016-06-09 19:11:20 +00:00
|
|
|
|
|
|
|
for(int i = 0; i < order; i++) {
|
|
|
|
memset(seen, 0, order*sizeof(int));
|
|
|
|
for(int len = 1; len <= max_wordlength; len++) {
|
|
|
|
// remove all edges originating from i of length len which connect to something already seen using shorter edges
|
|
|
|
edge = graph[i].bruhat_lower;
|
|
|
|
previous = (edgelist_t*)0;
|
|
|
|
while(edge) {
|
|
|
|
if(seen[edge->to] && graph[i].wordlength - graph[edge->to].wordlength == len) {
|
|
|
|
// printf("deleting from %d to %d\n", i, edge->to);
|
|
|
|
if(previous)
|
|
|
|
previous->next = edge->next;
|
|
|
|
else
|
|
|
|
graph[i].bruhat_lower = edge->next;
|
|
|
|
} else {
|
|
|
|
previous = edge;
|
|
|
|
}
|
|
|
|
edge = edge->next;
|
|
|
|
}
|
|
|
|
|
|
|
|
// see which nodes we can reach using only edges up to length len, mark them as seen
|
|
|
|
queue_init(&queue);
|
|
|
|
queue_put(&queue, i);
|
|
|
|
seen[i] = 1;
|
|
|
|
while((current = queue_get(&queue)) != -1) {
|
|
|
|
edge = graph[current].bruhat_lower;
|
|
|
|
while(edge) {
|
|
|
|
if(!seen[edge->to] && graph[current].wordlength - graph[edge->to].wordlength == len) {
|
|
|
|
seen[edge->to] = 1;
|
|
|
|
queue_put(&queue, edge->to);
|
|
|
|
}
|
|
|
|
edge = edge->next;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-06-20 08:37:21 +00:00
|
|
|
// DEBUG("Reverse folding order\n");
|
2016-06-09 19:11:20 +00:00
|
|
|
|
|
|
|
for(int i = 0; i < order; i++) {
|
|
|
|
edge = graph[i].bruhat_lower;
|
|
|
|
while(edge) {
|
|
|
|
edgelists[edgelist_count].to = i;
|
|
|
|
edgelists[edgelist_count].next = graph[edge->to].bruhat_higher;
|
|
|
|
graph[edge->to].bruhat_higher = &edgelists[edgelist_count];
|
|
|
|
edgelist_count++;
|
|
|
|
edge = edge->next;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-06-20 08:37:21 +00:00
|
|
|
printf("Rank: %d\t\tOrder: %d\t\tHyperplanes: %d\n", rank, order, hyperplane_count);
|
|
|
|
printf("\n");
|
|
|
|
printf("Group elements: \n");
|
|
|
|
for(int i = 0, wl = 0; i < order; i++) {
|
|
|
|
if(i == 0) {
|
|
|
|
printf("1");
|
|
|
|
} else if(graph[i].wordlength > wl) {
|
|
|
|
printf("\n%s ", alphabetize(graph[i].word, graph[i].wordlength, alphabet, string_buffer1));
|
|
|
|
wl = graph[i].wordlength;
|
|
|
|
} else
|
2016-06-09 19:11:20 +00:00
|
|
|
printf("%s ", alphabetize(graph[i].word, graph[i].wordlength, alphabet, string_buffer1));
|
|
|
|
}
|
2016-06-20 08:37:21 +00:00
|
|
|
printf("\n\n");
|
2016-06-09 19:11:20 +00:00
|
|
|
|
2016-06-20 08:37:21 +00:00
|
|
|
// DEBUG("Enumerate thickenings\n");
|
2016-06-09 19:11:20 +00:00
|
|
|
|
|
|
|
thickenings_count = fat_count = slim_count = balanced_count = 0;
|
|
|
|
memset(level, 0, order*sizeof(int));
|
|
|
|
current_level = 1;
|
|
|
|
head = order - 1;
|
|
|
|
level[head] = -1;
|
|
|
|
while(current_level > 0) {
|
|
|
|
// calculate transitive closure
|
|
|
|
queue_init(&queue);
|
|
|
|
queue_put(&queue, head);
|
|
|
|
while((current = queue_get(&queue)) != -1) {
|
|
|
|
edge = graph[current].bruhat_lower;
|
|
|
|
while(edge) {
|
|
|
|
if(level[edge->to] == 0) {
|
|
|
|
level[edge->to] = current_level;
|
|
|
|
queue_put(&queue, edge->to);
|
|
|
|
}
|
|
|
|
edge = edge->next;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
is_fat = is_slim = 1;
|
|
|
|
for(int i = 0; i < order; i++) {
|
|
|
|
if(level[graph[i].opposite] != 0) {
|
|
|
|
if(level[i] != 0)
|
|
|
|
is_slim = 0;
|
|
|
|
} else {
|
|
|
|
if(level[i] == 0)
|
|
|
|
is_fat = 0;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// count
|
|
|
|
thickenings_count++;
|
|
|
|
if(is_fat)
|
|
|
|
fat_count++;
|
|
|
|
if(is_slim)
|
|
|
|
slim_count++;
|
2016-06-20 08:37:21 +00:00
|
|
|
if(is_slim && is_fat) {
|
|
|
|
ERROR(balanced_count >= MAX_THICKENINGS, "Too many balanced thickenings! Increase MAX_THICKENINGS\n");
|
|
|
|
memcpy(&balanced_thickenings[balanced_count*order], level, order*sizeof(int));
|
2016-06-09 19:11:20 +00:00
|
|
|
balanced_count++;
|
2016-06-20 08:37:21 +00:00
|
|
|
}
|
2016-06-09 19:11:20 +00:00
|
|
|
|
|
|
|
if(is_fat && is_slim) {
|
|
|
|
// check for invariances
|
|
|
|
for(int j = 0; j < rank; j++) {
|
2016-06-20 08:37:21 +00:00
|
|
|
left_invariant[j] = 1;
|
|
|
|
right_invariant[j] = 1;
|
2016-06-09 19:11:20 +00:00
|
|
|
}
|
|
|
|
for(int i = 0; i < order; i++) {
|
2016-06-20 08:37:21 +00:00
|
|
|
for(int j = 0; j < rank; j++) {
|
|
|
|
if(level[i] == 0 && level[graph[i].left[j]] != 0 || level[i] != 0 && level[graph[i].left[j]] == 0)
|
|
|
|
left_invariant[j] = 0;
|
|
|
|
if(level[i] == 0 && level[graph[i].right[j]] != 0 || level[i] != 0 && level[graph[i].right[j]] == 0)
|
|
|
|
right_invariant[j] = 0;
|
|
|
|
}
|
2016-06-09 19:11:20 +00:00
|
|
|
}
|
2016-06-20 08:37:21 +00:00
|
|
|
print_balanced_thickening(rank, order, level, left_invariant, right_invariant, alphabet);
|
2016-06-09 19:11:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// try to find empty spot to the left of "head"
|
|
|
|
for(i = head - 1; i >= 0; i--)
|
|
|
|
if(level[i] == 0)
|
2016-06-20 08:37:21 +00:00
|
|
|
break;
|
2016-06-09 19:11:20 +00:00
|
|
|
if(i >= 0) {
|
|
|
|
head = i;
|
|
|
|
level[head] = -1;
|
|
|
|
current_level++;
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
|
|
|
|
// if none was found, try to move "head" to the left
|
|
|
|
while(current_level > 0) {
|
|
|
|
for(i = head - 1; i >= 0; i--)
|
|
|
|
if(level[i] == 0 || level[i] >= current_level)
|
|
|
|
break;
|
|
|
|
if(i >= 0) { // if this was successful, just move head
|
|
|
|
level[head] = 0;
|
|
|
|
head = i;
|
|
|
|
level[head] = -1;
|
|
|
|
break;
|
|
|
|
} else { // if moving the head is not possible, take the next head to the right
|
|
|
|
current_level--;
|
|
|
|
level[head] = 0;
|
|
|
|
do {
|
|
|
|
head++;
|
|
|
|
} while(head < order && level[head] != -1);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// clean up
|
|
|
|
for(int i = 0; i < head; i++)
|
|
|
|
if(level[i] >= current_level)
|
|
|
|
level[i] = 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
printf("\n");
|
2016-06-20 08:37:21 +00:00
|
|
|
printf("Found %d thickenings, %d fat, %d slim, %d balanced\n\n", thickenings_count, fat_count, slim_count, balanced_count);
|
2016-06-09 19:11:20 +00:00
|
|
|
|
2016-07-07 10:35:23 +00:00
|
|
|
|
|
|
|
|
2016-06-09 19:11:20 +00:00
|
|
|
/*
|
2016-06-20 08:37:21 +00:00
|
|
|
for(int i = 0; i < balanced_count; i++) {
|
|
|
|
// figure out invariances
|
|
|
|
for(int j = 0; j < rank; j++) {
|
|
|
|
left_invariant[j] = 1;
|
|
|
|
right_invariant[j] = 1;
|
|
|
|
}
|
2016-06-09 19:11:20 +00:00
|
|
|
|
2016-06-20 08:37:21 +00:00
|
|
|
int *current_thickening = balanced_thickenings + i*order;
|
2016-06-09 19:11:20 +00:00
|
|
|
|
2016-06-20 08:37:21 +00:00
|
|
|
for(int k = 0; k < order; k++) {
|
|
|
|
for(int j = 0; j < rank; j++) {
|
|
|
|
if(current_thickening[k] == 0 && current_thickening[graph[k].left[j]] != 0 || current_thickening[k] != 0 && current_thickening[graph[k].left[j]] == 0)
|
|
|
|
left_invariant[j] = 0;
|
|
|
|
if(current_thickening[k] == 0 && current_thickening[graph[k].right[j]] != 0 || current_thickening[k] != 0 && current_thickening[graph[k].right[j]] == 0)
|
|
|
|
right_invariant[j] = 0;
|
|
|
|
}
|
2016-06-09 19:11:20 +00:00
|
|
|
}
|
|
|
|
|
2016-06-20 08:37:21 +00:00
|
|
|
printf("left: ");
|
|
|
|
for(int k = 0; k < rank; k++)
|
|
|
|
printf("%c", left_invariant[k] ? alphabet[k] : ' ');
|
|
|
|
printf(" right: ");
|
|
|
|
for(int k = 0; k < rank; k++)
|
|
|
|
printf("%c", right_invariant[k] ? alphabet[k] : ' ');
|
2016-06-09 19:11:20 +00:00
|
|
|
printf("\n");
|
|
|
|
}
|
|
|
|
*/
|
|
|
|
|
2016-06-20 08:37:21 +00:00
|
|
|
|
2016-06-09 19:11:20 +00:00
|
|
|
free(edgelists);
|
|
|
|
free(words);
|
|
|
|
free(string_buffer1);
|
|
|
|
free(string_buffer2);
|
|
|
|
free(graph);
|
|
|
|
free(graph_unsorted);
|
|
|
|
free(graph_data);
|
|
|
|
free(wordlength_order);
|
|
|
|
free(reverse_wordlength_order);
|
|
|
|
free(seen);
|
|
|
|
free(level);
|
|
|
|
free(left);
|
|
|
|
free(right);
|
|
|
|
free(left_invariant);
|
|
|
|
free(right_invariant);
|
|
|
|
free(type.factors);
|
2016-06-20 08:37:21 +00:00
|
|
|
free(balanced_thickenings);
|
2016-06-09 19:11:20 +00:00
|
|
|
|
|
|
|
return 0;
|
|
|
|
}
|