/* 2016-06-09 19:11:20 +00:00 */
# define _GNU_SOURCE
# include <stdio.h>
# include <limits.h>
# include <stdlib.h>
# include <malloc.h>
# include <memory.h>
/* 2016-07-26 08:09:34 +00:00 */
# include "thickenings.h"
/* 2016-06-09 19:11:20 +00:00 */
# include "coxeter.h"
# include "queue.h"
/* 2016-07-26 08:09:34 +00:00 */
/* Renders a word (a sequence of generator indices) as a NUL-terminated
 * string by looking each index up in `alphabet`.  `buffer` must hold at
 * least len+1 chars; it is returned for call-chaining convenience. */
char *alphabetize(int *word, int len, const char *alphabet, char *buffer)
{
	int pos = 0;
	while(pos < len) {
		buffer[pos] = alphabet[word[pos]];
		pos++;
	}
	buffer[pos] = '\0';
	return buffer;
}
/* 2016-08-26 12:56:23 +00:00 */
/* Prints one row describing a (partial) thickening to stream f, one
 * character per Weyl-group element, followed by optional F/S/C flags:
 *   1000      -> red 'x'               (current search head)
 *   -1..-9    -> inverted digit        (forced OUT at that search depth)
 *   <= -10    -> inverted '+'          (forced out, depth >= 10)
 *   1..9      -> plain digit           (IN at that search depth)
 *   >= 10     -> '+'                   (in, depth >= 10)
 *   0         -> space                 (undecided)
 * Fix: uses the standard '\033' (ESC) escape instead of the GNU-only
 * '\e' extension; the emitted bytes are identical.
 * rank and alphabet are currently unused; they are kept so the signature
 * matches print_balanced_thickening. */
void print_thickening(int rank, int order, const int *thickening, int is_fat, int is_slim, int conflict, const char *alphabet, FILE *f)
{
	(void)rank;
	(void)alphabet;
	for(int i = 0; i < order; i++) {
		if(thickening[i] == 1000)
			fprintf(f, "\033[41mx\033[40m\033[m");
		else if(thickening[i] < 0 && thickening[i] > -10)
			fprintf(f, "\033[47m\033[30m%d\033[40m\033[m", -thickening[i]);
		else if(thickening[i] <= -10)
			fprintf(f, "\033[47m\033[30m+\033[40m\033[m");
		else if(thickening[i] > 0 && thickening[i] < 10)
			fprintf(f, "%d", thickening[i]);
		else if(thickening[i] >= 10)
			fprintf(f, "+");
		else
			fprintf(f, " ");
	}
	if(is_fat)
		fprintf(f, " F");
	if(is_slim)
		fprintf(f, " S");
	if(conflict)
		fprintf(f, " C");
	fprintf(f, "\n");
}
/* 2016-07-26 08:09:34 +00:00 */
/* Prints a balanced thickening to stream f: an x/0 cell per Weyl-group
 * element (x = element is in the thickening), then the generators under
 * which it is left- resp. right-invariant.  A space is printed for each
 * non-invariant generator so the columns stay aligned across rows. */
void print_balanced_thickening(int rank, int order, const int *thickening, const int *left_invariant, const int *right_invariant, const char *alphabet, FILE *f)
{
	for(int elem = 0; elem < order; elem++)
		fputc(thickening[elem] ? 'x' : '0', f);

	fprintf(f, " left: ");
	for(int gen = 0; gen < rank; gen++)
		fputc(left_invariant[gen] ? alphabet[gen] : ' ', f);

	fprintf(f, " right: ");
	for(int gen = 0; gen < rank; gen++)
		fputc(right_invariant[gen] ? alphabet[gen] : ' ', f);

	fprintf(f, "\n");
}
/* 2016-06-09 19:11:20 +00:00 */
/* qsort_r comparator (glibc flavor: context pointer last): orders node
 * indices by ascending word length in the node array passed as context. */
static int compare_wordlength(const void *a, const void *b, void *gr)
{
	const node_t *nodes = (const node_t *)gr;
	int left = *(const int *)a;
	int right = *(const int *)b;
	return nodes[left].wordlength - nodes[right].wordlength;
}
/* 2016-07-26 08:09:34 +00:00 */
/* Builds the full combinatorial data of the Weyl group of `type` into
 * `graph` (whose per-node left/right adjacency arrays the caller has
 * already allocated): left/right Cayley-graph edges, a shortest word for
 * every element, the opposition involution, and the Bruhat order as
 * covering-edge lists.  The two backing arrays that the edge lists and
 * words point into are handed back through edgelists_pointer and
 * words_pointer purely so the caller can free them later. */
void prepare_graph(semisimple_type_t type, node_t *graph, edgelist_t **edgelists_pointer, int **words_pointer)
{
	queue_t queue;
	int rank, order;
	edgelist_t *edge, *previous;
	int edgelist_count, max_wordlength, hyperplane_count;
	int current;

	int *graph_data;
	node_t *graph_unsorted;
	int *wordlength_order, *reverse_wordlength_order, *seen, *words;
	edgelist_t *edgelists;

	// initialize
	rank = coxeter_rank(type);
	order = coxeter_order(type);
	graph_data = (int*)malloc(order*rank*sizeof(int));
	graph_unsorted = (node_t*)malloc(order*sizeof(node_t));
	wordlength_order = (int*)malloc(order*sizeof(int));
	reverse_wordlength_order = (int*)malloc(order*sizeof(int));
	seen = (int*)malloc(order*sizeof(int));
	for(int i = 0; i < order; i++) {
		graph_unsorted[i].left = graph[i].left;   // reuse the caller-provided adjacency storage
		graph_unsorted[i].right = graph[i].right;
		graph_unsorted[i].word = 0;
		graph_unsorted[i].wordlength = INT_MAX;   // "infinity" until the BFS below reaches the node
		graph_unsorted[i].bruhat_lower = 0;
		graph_unsorted[i].bruhat_higher = 0;
		graph_unsorted[i].is_hyperplane_reflection = 0;
	}

	// get coxeter graph
	generate_coxeter_graph(type, graph_data);
	for(int i = 0; i < order; i++)
		for(int j = 0; j < rank; j++)
			graph_unsorted[i].left[j] = graph_data[i*rank + j];

	// find wordlengths: BFS from the identity (node 0) along left edges
	graph_unsorted[0].wordlength = 0;
	queue_init(&queue);
	queue_put(&queue, 0);
	while((current = queue_get(&queue)) != -1) {
		for(int i = 0; i < rank; i++) {
			int neighbor = graph_unsorted[current].left[i];
			if(graph_unsorted[neighbor].wordlength > graph_unsorted[current].wordlength + 1) {
				graph_unsorted[neighbor].wordlength = graph_unsorted[current].wordlength + 1;
				queue_put(&queue, neighbor);
			}
		}
	}
	max_wordlength = 0;
	for(int i = 0; i < order; i++)
		if(graph_unsorted[i].wordlength > max_wordlength)
			max_wordlength = graph_unsorted[i].wordlength;

	// sort by wordlength
	for(int i = 0; i < order; i++)
		wordlength_order[i] = i;
	qsort_r(wordlength_order, order, sizeof(int), compare_wordlength, graph_unsorted); // wordlength_order: new index -> old index
	for(int i = 0; i < order; i++)
		reverse_wordlength_order[wordlength_order[i]] = i; // reverse_wordlength_order: old index -> new index
	for(int i = 0; i < order; i++) {
		graph[i] = graph_unsorted[wordlength_order[i]]; // copy the whole node
		for(int j = 0; j < rank; j++)
			graph[i].left[j] = reverse_wordlength_order[graph[i].left[j]]; // rewrite references to sorted indices
	}

	// find words: BFS again, extending each parent's word by one generator
	words = (int*)malloc(order*max_wordlength*sizeof(int));
	memset(words, 0, order*max_wordlength*sizeof(int));
	graph[0].word = &words[0];
	queue_init(&queue);
	queue_put(&queue, 0);
	while((current = queue_get(&queue)) != -1) {
		for(int i = 0; i < rank; i++) {
			int neighbor = graph[current].left[i];
			if(graph[neighbor].wordlength == graph[current].wordlength + 1 && graph[neighbor].word == 0) {
				graph[neighbor].word = &words[neighbor*max_wordlength];
				memcpy(&graph[neighbor].word[1], &graph[current].word[0], graph[current].wordlength*sizeof(int));
				graph[neighbor].word[0] = i;
				queue_put(&queue, neighbor);
			}
		}
	}

	// generate right edges: right action = apply the element's word from the right
	for(int i = 0; i < order; i++) {
		for(int j = 0; j < rank; j++) {
			current = graph[0].left[j];
			for(int k = graph[i].wordlength - 1; k >= 0; k--) // apply group element from right to left
				current = graph[current].left[graph[i].word[k]];
			graph[i].right[j] = current;
		}
	}

	// find opposites: multiply by the longest element (last node after sorting)
	node_t *longest = &graph[order-1];
	for(int i = 0; i < order; i++) {
		current = i;
		for(int k = longest->wordlength - 1; k >= 0; k--)
			current = graph[current].left[longest->word[k]];
		graph[i].opposite = current;
	}

	// enumerate hyperplanes: for each (element, generator) compute the
	// reflection word2 * word1^{-1} and mark its node
	hyperplane_count = 0;
	for(int i = 0; i < order; i++) {
		for(int j = 0; j < rank; j++) {
			current = 0;
			int *word1 = graph[i].word;
			int word1len = graph[i].wordlength;
			int *word2 = graph[graph[i].right[j]].word;
			int word2len = graph[graph[i].right[j]].wordlength;
			for(int k = 0; k < word1len; k++) // apply inverse, i.e. go from left to right
				current = graph[current].left[word1[k]];
			for(int k = word2len - 1; k >= 0; k--) // now from right to left
				current = graph[current].left[word2[k]];
			if(graph[current].is_hyperplane_reflection == 0) {
				graph[current].is_hyperplane_reflection = 1;
				hyperplane_count++;
			}
		}
	}

	// generate folding order: fold along each hyperplane reflection and
	// record an edge from the longer element down to the shorter one
	edgelists = (edgelist_t*)malloc(order*hyperplane_count*sizeof(edgelist_t));
	edgelist_count = 0;
	for(int i = 0; i < order; i++) {
		if(!graph[i].is_hyperplane_reflection)
			continue;
		for(int j = 0; j < order; j++) {
			current = j;
			for(int k = graph[i].wordlength - 1; k >= 0; k--) // apply hyperplane reflection
				current = graph[current].left[graph[i].word[k]];
			if(graph[j].wordlength < graph[current].wordlength) { // current lies above j in Bruhat order
				edgelists[edgelist_count].to = j;
				edgelists[edgelist_count].next = graph[current].bruhat_lower;
				graph[current].bruhat_lower = &edgelists[edgelist_count];
				edgelist_count++;
			} else if(graph[j].wordlength > graph[current].wordlength) {
				// j above current: already recorded when the roles were swapped
			} else {
				ERROR(1, "Chambers of equal word lengths should not be folded on each other!\n");
			}
		}
	}

	// remove redundant edges, keeping only covering relations
	for(int i = 0; i < order; i++) {
		memset(seen, 0, order*sizeof(int));
		for(int len = 1; len <= max_wordlength; len++) {
			// drop edges from i of length len whose target is already reachable via shorter edges
			edge = graph[i].bruhat_lower;
			previous = (edgelist_t *)0;
			while(edge) {
				if(seen[edge->to] && graph[i].wordlength - graph[edge->to].wordlength == len) {
					if(previous)
						previous->next = edge->next;
					else
						graph[i].bruhat_lower = edge->next;
				} else {
					previous = edge;
				}
				edge = edge->next;
			}
			// mark everything reachable from i using only edges of length <= len
			queue_init(&queue);
			queue_put(&queue, i);
			seen[i] = 1;
			while((current = queue_get(&queue)) != -1) {
				edge = graph[current].bruhat_lower;
				while(edge) {
					if(!seen[edge->to] && graph[current].wordlength - graph[edge->to].wordlength == len) {
						seen[edge->to] = 1;
						queue_put(&queue, edge->to);
					}
					edge = edge->next;
				}
			}
		}
	}

	// reverse folding order: mirror each bruhat_lower edge as a bruhat_higher edge
	for(int i = 0; i < order; i++) {
		edge = graph[i].bruhat_lower;
		while(edge) {
			edgelists[edgelist_count].to = i;
			edgelists[edgelist_count].next = graph[edge->to].bruhat_higher;
			graph[edge->to].bruhat_higher = &edgelists[edgelist_count];
			edgelist_count++;
			edge = edge->next;
		}
	}

	// hand back the backing arrays (caller frees), release scratch space
	*edgelists_pointer = edgelists;
	*words_pointer = words;
	free(graph_data);
	free(graph_unsorted);
	free(wordlength_order);
	free(reverse_wordlength_order);
	free(seen);
}
/* Enumerates fat, conflict-free (hence balanced) thickenings of the Weyl
 * group by a depth-first search over "head" positions, writing each one
 * found as a raw block of `order` ints to outfile.
 *
 * level[] encodes the search state per element:
 *   0              undecided / out
 *   1000           a search head (explicitly chosen to be in)
 *   k  (0<k<1000)  forced in at search depth k (Bruhat/opposition closure)
 *   -k             its opposite is in, so this element is forced out
 *
 * alphabet is only used by the (currently disabled) debug printing.
 *
 * Fixes relative to the previous version:
 *  - the closure scan tested level[i] before i < order, reading one int
 *    past the end of level[] whenever no head marker followed; the bounds
 *    check now comes first (same in-bounds behavior, no overflow);
 *  - the cleanup condition is parenthesized to make the &&/|| precedence
 *    explicit (semantics unchanged). */
void enumerate_balanced_thickenings(semisimple_type_t type, node_t *graph, const char *alphabet, FILE *outfile)
{
	int rank, order;
	int *level;
	int *left_invariant, *right_invariant; // only used by the disabled invariance printing below
	long thickenings_count, fat_count, slim_count, balanced_count;
	int is_fat, is_slim;
	int current_level, head, current;
	int i;
	int conflict;
	edgelist_t *edge;
	queue_t queue;

	rank = coxeter_rank(type);
	order = coxeter_order(type);
	level = (int*)malloc(order*sizeof(int));
	left_invariant = (int*)malloc(rank*sizeof(int));
	right_invariant = (int*)malloc(rank*sizeof(int));

	thickenings_count = fat_count = slim_count = balanced_count = 0;
	memset(level, 0, order*sizeof(int));
	current_level = 1;
	head = order - 1;
	level[head] = 1000;

	while(current_level > 0) {
		// calculate transitive closure of the current head set
		conflict = 0;
		queue_init(&queue);
		queue_put(&queue, head);
		// BUGFIX: check i < order BEFORE reading level[i] (was reversed,
		// causing an out-of-bounds read when no 1000-marker follows head)
		for(int i = head + 1; i < order && level[i] != 1000; i++) {
			if(level[graph[i].opposite] == 0) {
				level[graph[i].opposite] = current_level;
				queue_put(&queue, graph[i].opposite);
			}
		}

		while((current = queue_get(&queue)) != -1) {
			if(level[current] < 0 || level[graph[current].opposite] > 0) { // conflict, can not be slim
				conflict = 1;
				break;
			}
			if(level[graph[current].opposite] == 0)
				level[graph[current].opposite] = -current_level; // opposite is forced out
			edge = graph[current].bruhat_lower;
			while(edge) {
				if(level[edge->to] < 0) {
					conflict = 1;
					break;
				}
				if(level[edge->to] == 0) {
					level[edge->to] = current_level; // Bruhat-lower elements are forced in
					queue_put(&queue, edge->to);
				}
				edge = edge->next;
			}
		}

		/* disabled: full per-thickening fat/slim bookkeeping (the counters
		   other than balanced_count are only updated here, so the summary
		   below currently reports them as 0)
		is_fat = is_slim = 1;
		for(int i = 0; i < order; i++) {
			if(level[graph[i].opposite] != 0) {
				if(level[i] != 0)
					is_slim = 0;
			} else {
				if(level[i] == 0)
					is_fat = 0;
			}
		}
		thickenings_count++;
		if(is_fat)
			fat_count++;
		if(is_slim)
			slim_count++;
		if(is_slim && is_fat) {
			ERROR(balanced_count >= MAX_THICKENINGS, "Too many balanced thickenings! Increase MAX_THICKENINGS\n");
			fwrite(level, sizeof(int), order, outfile);
			balanced_count++;
		}
		*/

		/* disabled: print each balanced thickening with its invariances
		if(is_fat && is_slim) {
			for(int j = 0; j < rank; j++) {
				left_invariant[j] = 1;
				right_invariant[j] = 1;
			}
			for(int i = 0; i < order; i++) {
				for(int j = 0; j < rank; j++) {
					if(level[i] == 0 && level[graph[i].left[j]] != 0 || level[i] != 0 && level[graph[i].left[j]] == 0)
						left_invariant[j] = 0;
					if(level[i] == 0 && level[graph[i].right[j]] != 0 || level[i] != 0 && level[graph[i].right[j]] == 0)
						right_invariant[j] = 0;
				}
			}
			print_balanced_thickening(rank, order, level, left_invariant, right_invariant, alphabet, stderr);
		}
		*/

		// the closure is slim iff no conflict arose; if it is also fat, output it
		is_fat = 1;
		for(int i = 0; i < order; i++) {
			if(level[i] == 0)
				is_fat = 0;
		}
		if(is_fat && !conflict) {
			fwrite(level, sizeof(int), order, outfile);
			balanced_count++;
		}
		// print_thickening(rank, order, level, is_fat, !conflict, conflict, alphabet, stderr);

		// now find the next candidate!
		// try to find an empty spot left of "head" — only worthwhile while slim,
		// as adding more elements to a conflicted set cannot help
		if(!conflict) {
			for(i = head - 1; i >= 0; i--)
				if(level[i] == 0)
					break;
			if(i >= 0) {
				head = i;
				level[head] = 1000;
				current_level++;
				// print_thickening(rank, order, level, 0, 0, 0, alphabet, stderr);
				continue;
			}
		}
		// otherwise backtrack: move "head" to the left, or pop it
		while(current_level > 0) {
			for(i = head - 1; i >= 0; i--)
				if(level[i] == 0 || level[i] >= current_level || level[i] <= -current_level)
					break;
			if(i >= 0) { // success: just move this head
				level[head] = 0;
				head = i;
				level[head] = 1000;
				break;
			} else { // can't move it: drop this head and continue with the next one to the right
				current_level--;
				level[head] = 0;
				do {
					head++;
				} while(head < order && level[head] != 1000);
			}
		}
		// clean up: clear everything decided at or beyond the new depth
		// (parenthesized for clarity; && binds tighter than ||)
		for(int i = 0; i < order; i++)
			if((level[i] >= current_level && level[i] != 1000) || level[i] <= -current_level)
				level[i] = 0;
		// print_thickening(rank, order, level, 0, 0, 0, alphabet, stderr);
	}

	fprintf(stderr, "\n");
	fprintf(stderr, "Found %ld thickenings, %ld fat, %ld slim, %ld balanced\n\n", thickenings_count, fat_count, slim_count, balanced_count);

	free(level);
	free(left_invariant);
	free(right_invariant);
}