Dungeon Crawl Stone Soup Tracker - DCSS
|
|||||
Viewing Issue Advanced Details | |||||
|
|||||
ID: | Category: | Severity: | Reproducibility: | Date Submitted: | Last Update: |
3939 | Patches | feature | N/A | 2011-05-08 01:04 | 2011-10-13 18:57 |
|
|||||
Reporter: | brendan | Local or Remote: | Both | ||
Assigned To: | SamB | Operating System: | All | ||
Priority: | normal | Console or Tiles: | Both | ||
Status: | closed | Product Branch: | 0.10 ancient branch | ||
Product Version: | Resolution: | done | |||
Projection: | none | ||||
ETA: | none | Fixed in Branch: | 0.10 ancient branch | ||
|
|||||
Summary: | 0003939: Worley Noise Abyss terrain | ||||
Description: |
Worley Noise is a form of procedural noise based on tessellation of a surface into convex polyhedra. Attached is a first draft of a patch to generate more coherent Abyss terrain using Worley noise. When a player is banished, part of the dungeon within LOS_RADIUS is created in the abyss. |
||||
Steps To Reproduce: | |||||
Additional Information: | |||||
System Description: | |||||
Relationships | |||||
Attached Files: |
worley-abyss.patch [^] (19,909 bytes) 2011-05-08 01:04 [Show Content] [Hide Content]From e70af278434e20a219d6c3fa7771d4d6a4fdf651 Mon Sep 17 00:00:00 2001 From: Brendan Hickey <brendan@bhickey.net> Date: Sat, 7 May 2011 23:50:29 +0100 Subject: [PATCH] First draft of a worley noise abyss --- crawl-ref/source/abyss.cc | 61 +++++++--- crawl-ref/source/cellular.cc | 265 +++++++++++++++++++++++++++++++++++++++++ crawl-ref/source/cellular.h | 50 ++++++++ crawl-ref/source/makefile.obj | 1 + 4 files changed, 362 insertions(+), 15 deletions(-) create mode 100644 crawl-ref/source/cellular.cc create mode 100644 crawl-ref/source/cellular.h diff --git a/crawl-ref/source/abyss.cc b/crawl-ref/source/abyss.cc index 97000dd..0e1288b 100644 --- a/crawl-ref/source/abyss.cc +++ b/crawl-ref/source/abyss.cc @@ -9,10 +9,12 @@ #include <cstdlib> #include <algorithm> +#include <cmath> #include "abyss.h" #include "areas.h" #include "artefact.h" +#include "cellular.h" #include "cloud.h" #include "colour.h" #include "coord.h" @@ -90,6 +92,15 @@ static bool _place_feature_near(const coord_def &centre, return (false); } +static unsigned int depth_of_abyss() +{ + int turn_adjustment = std::max(10, (env.turns_on_level / 750)); + int depth = you.experience_level * turn_adjustment; + if(you.penance[GOD_LUGONU]) + depth *= 2; + return std::max(200, depth); +} + //#define DEBUG_ABYSS // Returns a feature suitable for use in the proto-Abyss level. @@ -386,25 +397,25 @@ static int _abyss_create_items(const map_mask &abyss_genlevel_mask, static std::vector<dungeon_feature_type> _abyss_pick_terrain_elements() { std::vector<dungeon_feature_type> terrain_elements; - + const int depth = depth_of_abyss(); const int n_terrain_elements = 5; // Generate level composition vector. for (int i = 0; i < n_terrain_elements; i++) { - // Weights are in hundredths of a percentage; i.e. 5073 = - // 50.73%, 16 = 0.16%, etc. 
+ terrain_elements.push_back( static_cast<dungeon_feature_type>( - random_choose_weighted(5073, DNGN_ROCK_WALL, - 2008, DNGN_STONE_WALL, - 914, DNGN_METAL_WALL, - 722, DNGN_LAVA, - 666, DNGN_SHALLOW_WATER, - 601, DNGN_DEEP_WATER, - 16, DNGN_CLOSED_DOOR, + random_choose_weighted(50, DNGN_ROCK_WALL, + 20, DNGN_STONE_WALL, + 90, DNGN_METAL_WALL, + (depth > 20 ? 72 : 5), DNGN_LAVA, + (depth > 20 ? 10 : 66), DNGN_SHALLOW_WATER, + (depth > 15 ? 60 : 10), DNGN_DEEP_WATER, + 1, DNGN_CLOSED_DOOR, 0))); } + std::sort(terrain_elements.begin(), terrain_elements.end()); return (terrain_elements); } @@ -480,6 +491,14 @@ static dungeon_feature_type _abyss_pick_altar() return (altar); } +static void cellular_distance(double *F, unsigned long *ID, const coord_def p, double z, double scalar) { + double x = (p.x + sin(p.x / 8.0)) / scalar; + double y = (p.y + sin(p.y / 8.0)) / scalar; + double point[3] = {x,y,z}; + double delta[2][3]; + worley::Worley(point, 2, F, delta, ID); +} + static void _abyss_apply_terrain(const map_mask &abyss_genlevel_mask) { const std::vector<dungeon_feature_type> terrain_elements = @@ -500,19 +519,30 @@ static void _abyss_apply_terrain(const map_mask &abyss_genlevel_mask) int altars_wanted = 0; bool use_abyss_exit_map = true; - const int floor_density = random_range(30, 95); + const int floor_density = random_range(90,110) + depth_of_abyss()/20; + double cell_scalar = random_range(2,4); + if(depth_of_abyss() > 20) { + cell_scalar -= 0.5; + } + const unsigned long offset = random_range(1,0x7FFFFFF); for (rectangle_iterator ri(MAPGEN_BORDER); ri; ++ri) { const coord_def p(*ri); + double dist[2]; + unsigned long id[2]; + cellular_distance(dist, id, p, offset, cell_scalar); if (!abyss_genlevel_mask(p) || map_masked(p, MMT_VAULT)) continue; - - if (x_chance_in_y(floor_density, 100)) + if (dist[0] * 100 < floor_density) grd(p) = DNGN_FLOOR; - else if (grd(p) == DNGN_UNSEEN) - grd(p) = terrain_elements[random2(n_terrain_elements)]; + else if (grd(p) == 
DNGN_UNSEEN) { + int index = (dist[0] * 2) / dist[1]; + index %= 2; + cellular_distance(dist, id, p, offset * 2, cell_scalar * 1.3); + grd(p) = terrain_elements[(id[0] + index)%n_terrain_elements]; + } // Place abyss exits, stone arches, and altars to liven up the scene: (_abyss_check_place_feat(p, exit_chance, @@ -531,6 +561,7 @@ static void _abyss_apply_terrain(const map_mask &abyss_genlevel_mask) DNGN_STONE_ARCH, abyss_genlevel_mask)); } + } static int _abyss_place_vaults(const map_mask &abyss_genlevel_mask) diff --git a/crawl-ref/source/cellular.cc b/crawl-ref/source/cellular.cc new file mode 100644 index 0000000..78867e4 --- /dev/null +++ b/crawl-ref/source/cellular.cc @@ -0,0 +1,265 @@ +/* Copyright 1994, 2002 by Steven Worley + This software may be modified and redistributed without restriction + provided this comment header remains intact in the source code. + This code is provided with no warrantee, express or implied, for + any purpose. + + A detailed description and application examples can be found in the + 1996 SIGGRAPH paper "A Cellular Texture Basis Function" and + especially in the 2002 book "Texturing and Modeling, a Procedural + Approach, 3rd edition." There is also extra information on the web + site http://www.worley.com/cellular.html . + + If you do find interesting uses for this tool, and especially if + you enhance it, please drop me an email at steve@worley.com. */ + +#include <math.h> +#include <stdio.h> +#include "cellular.h" /* Function prototype */ + +namespace worley { +/* This macro is a *lot* faster than using (long)floor() on an x86 CPU. + It actually speeds up the entire Worley() call with almost 10%. + Added by Stefan Gustavson, October 2003. */ +#define LFLOOR(x) ((x)<0 ? ((long)x-1) : ((long)x) ) + +/* A hardwired lookup table to quickly determine how many feature + points should be in each spatial cube. We use a table so we don't + need to make multiple slower tests. 
A random number indexed into + this array will give an approximate Poisson distribution of mean + density 2.5. Read the book for the longwinded explanation. */ +static int Poisson_count[256]= +{4,3,1,1,1,2,4,2,2,2,5,1,0,2,1,2,2,0,4,3,2,1,2,1,3,2,2,4,2,2,5,1,2,3,2,2,2,2,2,3, + 2,4,2,5,3,2,2,2,5,3,3,5,2,1,3,3,4,4,2,3,0,4,2,2,2,1,3,2,2,2,3,3,3,1,2,0,2,1,1,2, + 2,2,2,5,3,2,3,2,3,2,2,1,0,2,1,1,2,1,2,2,1,3,4,2,2,2,5,4,2,4,2,2,5,4,3,2,2,5,4,3, + 3,3,5,2,2,2,2,2,3,1,1,4,2,1,3,3,4,3,2,4,3,3,3,4,5,1,4,2,4,3,1,2,3,5,3,2,1,3,1,3, + 3,3,2,3,1,5,5,4,2,2,4,1,3,4,1,5,3,3,5,3,4,3,2,2,1,1,1,1,1,2,4,5,4,5,4,2,1,5,1,1, + 2,3,3,3,2,5,2,3,3,2,0,2,1,1,4,2,1,3,2,1,2,2,3,2,5,5,3,4,5,5,2,4,4,5,3,2,2,2,1,4, + 2,3,3,4,2,5,4,2,4,2,2,2,4,5,3,2}; + +/* This constant is manipulated to make sure that the mean value of F[0] + is 1.0. This makes an easy natural "scale" size of the cellular features. */ +#define DENSITY_ADJUSTMENT 0.398150 + +/* the function to merge-sort a "cube" of samples into the current best-found + list of values. */ +static void AddSamples(long xi, long yi, long zi, long max_order, + double at[3], double *F, + double (*delta)[3], unsigned long *ID); + + +/* The main function! */ +void Worley(double at[3], long max_order, + double *F, double (*delta)[3], unsigned long *ID) +{ + double x2,y2,z2, mx2, my2, mz2; + double new_at[3]; + long int_at[3], i; + + /* Initialize the F values to "huge" so they will be replaced by the + first real sample tests. Note we'll be storing and comparing the + SQUARED distance from the feature points to avoid lots of slow + sqrt() calls. We'll use sqrt() only on the final answer. 
*/ + for (i=0; i<max_order; i++) F[i]=999999.9; + + /* Make our own local copy, multiplying to make mean(F[0])==1.0 */ + new_at[0]=DENSITY_ADJUSTMENT*at[0]; + new_at[1]=DENSITY_ADJUSTMENT*at[1]; + new_at[2]=DENSITY_ADJUSTMENT*at[2]; + + /* Find the integer cube holding the hit point */ + int_at[0]=LFLOOR(new_at[0]); /* The macro makes this part a lot faster */ + int_at[1]=LFLOOR(new_at[1]); + int_at[2]=LFLOOR(new_at[2]); + + /* A simple way to compute the closest neighbors would be to test all + boundary cubes exhaustively. This is simple with code like: + { + long ii, jj, kk; + for (ii=-1; ii<=1; ii++) for (jj=-1; jj<=1; jj++) for (kk=-1; kk<=1; kk++) + AddSamples(int_at[0]+ii,int_at[1]+jj,int_at[2]+kk, + max_order, new_at, F, delta, ID); + } + But this wastes a lot of time working on cubes which are known to be + too far away to matter! So we can use a more complex testing method + that avoids this needless testing of distant cubes. This doubles the + speed of the algorithm. */ + + /* Test the central cube for closest point(s). */ + AddSamples(int_at[0], int_at[1], int_at[2], max_order, new_at, F, delta, ID); + + /* We test if neighbor cubes are even POSSIBLE contributors by examining the + combinations of the sum of the squared distances from the cube's lower + or upper corners.*/ + x2=new_at[0]-int_at[0]; + y2=new_at[1]-int_at[1]; + z2=new_at[2]-int_at[2]; + mx2=(1.0-x2)*(1.0-x2); + my2=(1.0-y2)*(1.0-y2); + mz2=(1.0-z2)*(1.0-z2); + x2*=x2; + y2*=y2; + z2*=z2; + + /* Test 6 facing neighbors of center cube. These are closest and most + likely to have a close feature point. 
*/ + if (x2<F[max_order-1]) AddSamples(int_at[0]-1, int_at[1] , int_at[2] , + max_order, new_at, F, delta, ID); + if (y2<F[max_order-1]) AddSamples(int_at[0] , int_at[1]-1, int_at[2] , + max_order, new_at, F, delta, ID); + if (z2<F[max_order-1]) AddSamples(int_at[0] , int_at[1] , int_at[2]-1, + max_order, new_at, F, delta, ID); + + if (mx2<F[max_order-1]) AddSamples(int_at[0]+1, int_at[1] , int_at[2] , + max_order, new_at, F, delta, ID); + if (my2<F[max_order-1]) AddSamples(int_at[0] , int_at[1]+1, int_at[2] , + max_order, new_at, F, delta, ID); + if (mz2<F[max_order-1]) AddSamples(int_at[0] , int_at[1] , int_at[2]+1, + max_order, new_at, F, delta, ID); + + /* Test 12 "edge cube" neighbors if necessary. They're next closest. */ + if ( x2+ y2<F[max_order-1]) AddSamples(int_at[0]-1, int_at[1]-1, int_at[2] , + max_order, new_at, F, delta, ID); + if ( x2+ z2<F[max_order-1]) AddSamples(int_at[0]-1, int_at[1] , int_at[2]-1, + max_order, new_at, F, delta, ID); + if ( y2+ z2<F[max_order-1]) AddSamples(int_at[0] , int_at[1]-1, int_at[2]-1, + max_order, new_at, F, delta, ID); + if (mx2+my2<F[max_order-1]) AddSamples(int_at[0]+1, int_at[1]+1, int_at[2] , + max_order, new_at, F, delta, ID); + if (mx2+mz2<F[max_order-1]) AddSamples(int_at[0]+1, int_at[1] , int_at[2]+1, + max_order, new_at, F, delta, ID); + if (my2+mz2<F[max_order-1]) AddSamples(int_at[0] , int_at[1]+1, int_at[2]+1, + max_order, new_at, F, delta, ID); + if ( x2+my2<F[max_order-1]) AddSamples(int_at[0]-1, int_at[1]+1, int_at[2] , + max_order, new_at, F, delta, ID); + if ( x2+mz2<F[max_order-1]) AddSamples(int_at[0]-1, int_at[1] , int_at[2]+1, + max_order, new_at, F, delta, ID); + if ( y2+mz2<F[max_order-1]) AddSamples(int_at[0] , int_at[1]-1, int_at[2]+1, + max_order, new_at, F, delta, ID); + if (mx2+ y2<F[max_order-1]) AddSamples(int_at[0]+1, int_at[1]-1, int_at[2] , + max_order, new_at, F, delta, ID); + if (mx2+ z2<F[max_order-1]) AddSamples(int_at[0]+1, int_at[1] , int_at[2]-1, + max_order, new_at, F, delta, 
ID); + if (my2+ z2<F[max_order-1]) AddSamples(int_at[0] , int_at[1]+1, int_at[2]-1, + max_order, new_at, F, delta, ID); + + /* Final 8 "corner" cubes */ + if ( x2+ y2+ z2<F[max_order-1]) AddSamples(int_at[0]-1, int_at[1]-1, int_at[2]-1, + max_order, new_at, F, delta, ID); + if ( x2+ y2+mz2<F[max_order-1]) AddSamples(int_at[0]-1, int_at[1]-1, int_at[2]+1, + max_order, new_at, F, delta, ID); + if ( x2+my2+ z2<F[max_order-1]) AddSamples(int_at[0]-1, int_at[1]+1, int_at[2]-1, + max_order, new_at, F, delta, ID); + if ( x2+my2+mz2<F[max_order-1]) AddSamples(int_at[0]-1, int_at[1]+1, int_at[2]+1, + max_order, new_at, F, delta, ID); + if (mx2+ y2+ z2<F[max_order-1]) AddSamples(int_at[0]+1, int_at[1]-1, int_at[2]-1, + max_order, new_at, F, delta, ID); + if (mx2+ y2+mz2<F[max_order-1]) AddSamples(int_at[0]+1, int_at[1]-1, int_at[2]+1, + max_order, new_at, F, delta, ID); + if (mx2+my2+ z2<F[max_order-1]) AddSamples(int_at[0]+1, int_at[1]+1, int_at[2]-1, + max_order, new_at, F, delta, ID); + if (mx2+my2+mz2<F[max_order-1]) AddSamples(int_at[0]+1, int_at[1]+1, int_at[2]+1, + max_order, new_at, F, delta, ID); + + /* We're done! Convert everything to right size scale */ + for (i=0; i<max_order; i++) + { + F[i]=sqrt(F[i])*(1.0/DENSITY_ADJUSTMENT); + delta[i][0]*=(1.0/DENSITY_ADJUSTMENT); + delta[i][1]*=(1.0/DENSITY_ADJUSTMENT); + delta[i][2]*=(1.0/DENSITY_ADJUSTMENT); + } + + return; +} + + + +static void AddSamples(long xi, long yi, long zi, long max_order, + double at[3], double *F, + double (*delta)[3], unsigned long *ID) +{ + double dx, dy, dz, fx, fy, fz, d2; + long count, i, j, index; + unsigned long seed, this_id; + + /* Each cube has a random number seed based on the cube's ID number. + The seed might be better if it were a nonlinear hash like Perlin uses + for noise but we do very well with this faster simple one. + Our LCG uses Knuth-approved constants for maximal periods. 
*/ + seed=702395077*xi + 915488749*yi + 2120969693*zi; + + /* How many feature points are in this cube? */ + count=Poisson_count[seed>>24]; /* 256 element lookup table. Use MSB */ + + seed=1402024253*seed+586950981; /* churn the seed with good Knuth LCG */ + + for (j=0; j<count; j++) /* test and insert each point into our solution */ + { + this_id=seed; + seed=1402024253*seed+586950981; /* churn */ + + /* compute the 0..1 feature point location's XYZ */ + fx=(seed+0.5)*(1.0/4294967296.0); + seed=1402024253*seed+586950981; /* churn */ + fy=(seed+0.5)*(1.0/4294967296.0); + seed=1402024253*seed+586950981; /* churn */ + fz=(seed+0.5)*(1.0/4294967296.0); + seed=1402024253*seed+586950981; /* churn */ + + /* delta from feature point to sample location */ + dx=xi+fx-at[0]; + dy=yi+fy-at[1]; + dz=zi+fz-at[2]; + + /* Distance computation! Lots of interesting variations are + possible here! + Biased "stretched" A*dx*dx+B*dy*dy+C*dz*dz + Manhattan distance fabs(dx)+fabs(dy)+fabs(dz) + Radial Manhattan: A*fabs(dR)+B*fabs(dTheta)+C*dz + Superquadratic: pow(fabs(dx), A) + pow(fabs(dy), B) + pow(fabs(dz),C) + + Go ahead and make your own! Remember that you must insure that + new distance function causes large deltas in 3D space to map into + large deltas in your distance function, so our 3D search can find + them! [Alternatively, change the search algorithm for your special + cases.] + */ + + d2=dx*dx+dy*dy+dz*dz; /* Euclidian distance, squared */ + + if (d2<F[max_order-1]) /* Is this point close enough to rememember? */ + { + /* Insert the information into the output arrays if it's close enough. + We use an insertion sort. No need for a binary search to find + the appropriate index.. usually we're dealing with order 2,3,4 so + we can just go through the list. If you were computing order 50 + (wow!!) you could get a speedup with a binary search in the sorted + F[] list. 
*/ + + index=max_order; + while (index>0 && d2<F[index-1]) index--; + + /* We insert this new point into slot # <index> */ + + /* Bump down more distant information to make room for this new point. */ + for (i=max_order-2; i>=index; i--) + { + F[i+1]=F[i]; + ID[i+1]=ID[i]; + delta[i+1][0]=delta[i][0]; + delta[i+1][1]=delta[i][1]; + delta[i+1][2]=delta[i][2]; + } + /* Insert the new point's information into the list. */ + F[index]=d2; + ID[index]=this_id; + delta[index][0]=dx; + delta[index][1]=dy; + delta[index][2]=dz; + } + } + + return; +} +} diff --git a/crawl-ref/source/cellular.h b/crawl-ref/source/cellular.h new file mode 100644 index 0000000..52167f3 --- /dev/null +++ b/crawl-ref/source/cellular.h @@ -0,0 +1,50 @@ +/* Copyright 1994, 2002 by Steven Worley + This software may be modified and redistributed without restriction + provided this comment header remains intact in the source code. + This code is provided with no warrantee, express or implied, for + any purpose. + + A detailed description and application examples can be found in the + 1996 SIGGRAPH paper "A Cellular Texture Basis Function" and + especially in the 2002 book "Texturing and Modeling, a Procedural + Approach, 3rd edition." There is also extra information on the web + site http://www.worley.com/cellular.html . + + If you do find interesting uses for this tool, and especially if + you enhance it, please drop me an email at steve@worley.com. */ + + + +/* Worley() + + An implementation of the key cellular texturing basis + function. This function is hardwired to return an average F_1 value + of 1.0. It returns the <n> most closest feature point distances + F_1, F_2, .. F_n the vector delta to those points, and a 32 bit + seed for each of the feature points. This function is not + difficult to extend to compute alternative information such as + higher order F values, to use the Manhattan distance metric, or + other fun perversions. + + <at> The input sample location. 
+ <max_order> Smaller values compute faster. < 5, read the book to extend it. + <F> The output values of F_1, F_2, ..F[n] in F[0], F[1], F[n-1] + <delta> The output vector difference between the sample point and the n-th + closest feature point. Thus, the feature point's location is the + hit point minus this value. The DERIVATIVE of F is the unit + normalized version of this vector. + <ID> The output 32 bit ID number which labels the feature point. This + is useful for domain partitions, especially for coloring flagstone + patterns. + + This implementation is tuned for speed in a way that any order > 5 + will likely have discontinuous artifacts in its computation of F5+. + This can be fixed by increasing the internal points-per-cube + density in the source code, at the expense of slower + computation. The book lists the details of this tuning. */ +namespace worley { +void Worley(double at[3], long max_order, + double F[2], double delta[2][3], unsigned long ID[2]); +} + + diff --git a/crawl-ref/source/makefile.obj b/crawl-ref/source/makefile.obj index 28c0f07..cd8a693 100644 --- a/crawl-ref/source/makefile.obj +++ b/crawl-ref/source/makefile.obj @@ -13,6 +13,7 @@ beam.o \ behold.o \ bitary.o \ branch.o \ +cellular.o \ chardump.o \ cio.o \ cloud.o \ -- 1.7.3.2 0001-A-worley-noise-abyss.patch [^] (27,805 bytes) 2011-05-23 21:12 [Show Content] [Hide Content] From c0c1680d47e314c726a8e4b1ea242e3e3d3b8326 Mon Sep 17 00:00:00 2001 From: Brendan Hickey <brendan@bhickey.net> Date: Mon, 23 May 2011 20:08:11 +0100 Subject: [PATCH] A worley-noise abyss --- crawl-ref/source/abyss.cc | 176 +++++++++++++++++++------ crawl-ref/source/abyss.h | 1 + crawl-ref/source/cellular.cc | 283 +++++++++++++++++++++++++++++++++++++++++ crawl-ref/source/cellular.h | 53 ++++++++ crawl-ref/source/dungeon.cc | 2 +- crawl-ref/source/effects.cc | 2 + crawl-ref/source/makefile.obj | 1 + 7 files changed, 475 insertions(+), 43 deletions(-) create mode 100644 crawl-ref/source/cellular.cc create mode 
100644 crawl-ref/source/cellular.h diff --git a/crawl-ref/source/abyss.cc b/crawl-ref/source/abyss.cc index 97000dd..766b459 100644 --- a/crawl-ref/source/abyss.cc +++ b/crawl-ref/source/abyss.cc @@ -7,12 +7,14 @@ #include "abyss.h" +#include <cmath> #include <cstdlib> #include <algorithm> #include "abyss.h" #include "areas.h" #include "artefact.h" +#include "cellular.h" #include "cloud.h" #include "colour.h" #include "coord.h" @@ -56,6 +58,8 @@ #endif const int ABYSSAL_RUNE_MAX_ROLL = 200; +bool just_banished = false; +std::vector<dungeon_feature_type> abyssal_features; // If not_seen is true, don't place the feature where it can be seen from // the centre. @@ -104,40 +108,36 @@ static dungeon_feature_type _abyss_proto_feature() 0)); } -// Generate the initial (proto) Abyss level. The proto Abyss is where -// the player lands when they arrive in the Abyss from elsewhere. -// _generate_area generates all other Abyss areas. -void generate_abyss() +void _write_abyssal_features() { - env.level_build_method += " abyss"; - env.level_layout_types.insert("abyss"); - - dprf("generate_abyss(); turn_on_level: %d", env.turns_on_level); - - for (rectangle_iterator ri(MAPGEN_BORDER); ri; ++ri) - grd(*ri) = _abyss_proto_feature(); + if (abyssal_features.empty()) + return; - // If we're starting out in the Abyss, make sure the starting grid is - // an altar to Lugonu and there's an exit near-by. - // Otherwise, we start out on floor and there's a chance there's an - // altar near-by. 
- if (you.char_direction == GDT_GAME_START) - { - grd(ABYSS_CENTRE) = DNGN_ALTAR_LUGONU; - _place_feature_near(ABYSS_CENTRE, LOS_RADIUS + 2, - DNGN_FLOOR, DNGN_EXIT_ABYSS, 50, true); - } - else + const int count = abyssal_features.size(); + const int scalar = 0xFF; + int index = 0; + for (radius_iterator ri(ABYSS_CENTRE, LOS_RADIUS, C_ROUND); ri; ++ri) { - grd(ABYSS_CENTRE) = DNGN_FLOOR; - if (one_chance_in(5)) - { - _place_feature_near(ABYSS_CENTRE, LOS_RADIUS, - DNGN_FLOOR, DNGN_ALTAR_LUGONU, 50); + const int dist = distance(ABYSS_CENTRE, *ri); + int chance = pow(0.98, dist) * scalar; + if (!map_masked(*ri, MMT_VAULT)) { + if (dist < 4 || x_chance_in_y(chance, scalar)) { + grd(*ri) = abyssal_features[index]; + } + else + { + //Entombing the player is lame. + grd(*ri) = DNGN_FLOOR; + } } + + ++index; + if (index > count) + return; } } + // Returns the roll to use to check if we want to create an abyssal rune. static int _abyssal_rune_roll() { @@ -383,19 +383,23 @@ static int _abyss_create_items(const map_mask &abyss_genlevel_mask, return (items_placed); } -static std::vector<dungeon_feature_type> _abyss_pick_terrain_elements() +static std::vector<dungeon_feature_type> _abyss_pick_terrain_elements(bool for_proto = false) { std::vector<dungeon_feature_type> terrain_elements; - const int n_terrain_elements = 5; + const int n_terrain_elements = random_range(5,7); // Generate level composition vector. for (int i = 0; i < n_terrain_elements; i++) { - // Weights are in hundredths of a percentage; i.e. 5073 = - // 50.73%, 16 = 0.16%, etc. 
- terrain_elements.push_back( - static_cast<dungeon_feature_type>( + dungeon_feature_type feat; + if (for_proto) + { + feat = _abyss_proto_feature(); + } + else + { + feat = static_cast<dungeon_feature_type>( random_choose_weighted(5073, DNGN_ROCK_WALL, 2008, DNGN_STONE_WALL, 914, DNGN_METAL_WALL, @@ -403,11 +407,49 @@ static std::vector<dungeon_feature_type> _abyss_pick_terrain_elements() 666, DNGN_SHALLOW_WATER, 601, DNGN_DEEP_WATER, 16, DNGN_CLOSED_DOOR, - 0))); + 0)); + } + terrain_elements.push_back(feat); } + return (terrain_elements); } +void push_features_to_abyss() +{ + just_banished = true; + abyssal_features.clear(); + + for (radius_iterator ri(you.pos(), LOS_RADIUS, C_ROUND); ri; ++ri) + { + dungeon_feature_type feature = grd(*ri); + if (feat_is_stair(feature)) + feature = (one_chance_in(3) ? DNGN_STONE_ARCH : DNGN_FLOOR); + + if (feat_is_altar(feature)) + feature = (one_chance_in(9) ? DNGN_ALTAR_XOM : DNGN_FLOOR); + + if (feat_is_trap(feature, true)) + feature = DNGN_FLOOR; + + switch (feature) + { + // demote permarock + case DNGN_PERMAROCK_WALL: + feature = DNGN_ROCK_WALL; + break; + case DNGN_CLEAR_PERMAROCK_WALL: + feature = DNGN_CLEAR_ROCK_WALL; + case DNGN_SLIMY_WALL: + feature = DNGN_GREEN_CRYSTAL_WALL; + default: + // handle more terrain types. + break; + } + abyssal_features.push_back(feature); + } +} + // Returns N so that the chance of placing an abyss exit on any given // square is 1 in N. 
static int _abyss_exit_chance() @@ -480,10 +522,9 @@ static dungeon_feature_type _abyss_pick_altar() return (altar); } -static void _abyss_apply_terrain(const map_mask &abyss_genlevel_mask) +static void _abyss_apply_terrain(const map_mask &abyss_genlevel_mask, bool proto=false) { - const std::vector<dungeon_feature_type> terrain_elements = - _abyss_pick_terrain_elements(); + const std::vector<dungeon_feature_type> terrain_elements = _abyss_pick_terrain_elements(proto); if (one_chance_in(3)) _abyss_create_rooms(abyss_genlevel_mask, random_range(1, 10)); @@ -500,20 +541,26 @@ static void _abyss_apply_terrain(const map_mask &abyss_genlevel_mask) int altars_wanted = 0; bool use_abyss_exit_map = true; - const int floor_density = random_range(30, 95); + const double abyss_id = random2(0x7FFFFF); + const double floor_density = 120; + const int column_chance = 4; for (rectangle_iterator ri(MAPGEN_BORDER); ri; ++ri) { const coord_def p(*ri); + worley::noise_datum noise = worley::worley(p.x/2.2, p.y/2.2, abyss_id); if (!abyss_genlevel_mask(p) || map_masked(p, MMT_VAULT)) continue; - if (x_chance_in_y(floor_density, 100)) + if (floor_density > noise.first_order * 100 + && !one_chance_in(column_chance)) grd(p) = DNGN_FLOOR; - else if (grd(p) == DNGN_UNSEEN) - grd(p) = terrain_elements[random2(n_terrain_elements)]; - + else if (grd(p) == DNGN_UNSEEN) + { + int id = (noise.id + one_chance_in(3)) % n_terrain_elements; + grd(p) = terrain_elements[id]; + } // Place abyss exits, stone arches, and altars to liven up the scene: (_abyss_check_place_feat(p, exit_chance, &exits_wanted, @@ -531,8 +578,53 @@ static void _abyss_apply_terrain(const map_mask &abyss_genlevel_mask) DNGN_STONE_ARCH, abyss_genlevel_mask)); } + } +// Generate the initial (proto) Abyss level. The proto Abyss is where +// the player lands when they arrive in the Abyss from elsewhere. +// _generate_area generates all other Abyss areas. 
+void generate_abyss() +{ + env.level_build_method += " abyss"; + env.level_layout_types.insert("abyss"); + + dprf("generate_abyss(); turn_on_level: %d", env.turns_on_level); + + map_mask abyss_genlevel_mask; + _abyss_apply_terrain(abyss_genlevel_mask, true); + + if (just_banished) + { + _write_abyssal_features(); + just_banished = false; + } + + // If we're starting out in the Abyss, make sure the starting grid is + // an altar to Lugonu and there's an exit near-by. + // Otherwise, we start out on floor and there's a chance there's an + // altar near-by. + if (you.char_direction == GDT_GAME_START) + { + grd(ABYSS_CENTRE) = DNGN_ALTAR_LUGONU; + _place_feature_near(ABYSS_CENTRE, LOS_RADIUS + 2, + DNGN_FLOOR, DNGN_EXIT_ABYSS, 50, true); + } + else + { + grd(ABYSS_CENTRE) = DNGN_FLOOR; + if (one_chance_in(5)) + { + _place_feature_near(ABYSS_CENTRE, LOS_RADIUS, + DNGN_FLOOR, DNGN_ALTAR_LUGONU, 50); + } + } + + generate_random_blood_spatter_on_level(&abyss_genlevel_mask); + setup_environment_effects(); + +} + static int _abyss_place_vaults(const map_mask &abyss_genlevel_mask) { unwind_vault_placement_mask vaultmask(&abyss_genlevel_mask); diff --git a/crawl-ref/source/abyss.h b/crawl-ref/source/abyss.h index 7499d0e..9f395c8 100644 --- a/crawl-ref/source/abyss.h +++ b/crawl-ref/source/abyss.h @@ -22,4 +22,5 @@ bool is_level_incorruptible(); bool lugonu_corrupt_level(int power); void run_corruption_effects(int duration); +void push_features_to_abyss(); #endif diff --git a/crawl-ref/source/cellular.cc b/crawl-ref/source/cellular.cc new file mode 100644 index 0000000..0b21143 --- /dev/null +++ b/crawl-ref/source/cellular.cc @@ -0,0 +1,283 @@ +/* Copyright 1994, 2002 by Steven Worley + This software may be modified and redistributed without restriction + provided this comment header remains intact in the source code. + This code is provided with no warrantee, express or implied, for + any purpose. 
+ + A detailed description and application examples can be found in the + 1996 SIGGRAPH paper "A Cellular Texture Basis Function" and + especially in the 2002 book "Texturing and Modeling, a Procedural + Approach, 3rd edition." There is also extra information on the web + site http://www.worley.com/cellular.html . + + If you do find interesting uses for this tool, and especially if + you enhance it, please drop me an email at steve@worley.com. */ + +#include <math.h> +#include <stdio.h> +#include <stdint.h> +#include "cellular.h" /* Function prototype */ + +namespace worley { + /* This macro is a *lot* faster than using (int32_t)floor() on an x86 CPU. + It actually speeds up the entire Worley() call with almost 10%. + Added by Stefan Gustavson, October 2003. */ +#define LFLOOR(x) ((x)<0 ? ((int32_t)x-1) : ((int32_t)x) ) + + /* A hardwired lookup table to quickly determine how many feature + points should be in each spatial cube. We use a table so we don't + need to make multiple slower tests. A random number indexed into + this array will give an approximate Poisson distribution of mean + density 2.5. Read the book for the int32_twinded explanation. */ + static int Poisson_count[256]= + {4,3,1,1,1,2,4,2,2,2,5,1,0,2,1,2,2,0,4,3,2,1,2,1,3,2,2,4,2,2,5,1,2,3,2,2,2,2,2,3, + 2,4,2,5,3,2,2,2,5,3,3,5,2,1,3,3,4,4,2,3,0,4,2,2,2,1,3,2,2,2,3,3,3,1,2,0,2,1,1,2, + 2,2,2,5,3,2,3,2,3,2,2,1,0,2,1,1,2,1,2,2,1,3,4,2,2,2,5,4,2,4,2,2,5,4,3,2,2,5,4,3, + 3,3,5,2,2,2,2,2,3,1,1,4,2,1,3,3,4,3,2,4,3,3,3,4,5,1,4,2,4,3,1,2,3,5,3,2,1,3,1,3, + 3,3,2,3,1,5,5,4,2,2,4,1,3,4,1,5,3,3,5,3,4,3,2,2,1,1,1,1,1,2,4,5,4,5,4,2,1,5,1,1, + 2,3,3,3,2,5,2,3,3,2,0,2,1,1,4,2,1,3,2,1,2,2,3,2,5,5,3,4,5,5,2,4,4,5,3,2,2,2,1,4, + 2,3,3,4,2,5,4,2,4,2,2,2,4,5,3,2}; + + /* This constant is manipulated to make sure that the mean value of F[0] + is 1.0. This makes an easy natural "scale" size of the cellular features. 
*/ +#define DENSITY_ADJUSTMENT 0.398150 + + /* the function to merge-sort a "cube" of samples into the current best-found + list of values. */ + static void AddSamples(int32_t xi, int32_t yi, int32_t zi, int32_t max_order, + double at[3], double *F, + double (*delta)[3], uint32_t *ID); + + + /* The main function! */ + void Worley(double at[3], int32_t max_order, + double *F, double (*delta)[3], uint32_t *ID) + { + double x2,y2,z2, mx2, my2, mz2; + double new_at[3]; + int32_t int_at[3], i; + + /* Initialize the F values to "huge" so they will be replaced by the + first real sample tests. Note we'll be storing and comparing the + SQUARED distance from the feature points to avoid lots of slow + sqrt() calls. We'll use sqrt() only on the final answer. */ + for (i=0; i<max_order; i++) F[i]=999999.9; + + /* Make our own local copy, multiplying to make mean(F[0])==1.0 */ + new_at[0]=DENSITY_ADJUSTMENT*at[0]; + new_at[1]=DENSITY_ADJUSTMENT*at[1]; + new_at[2]=DENSITY_ADJUSTMENT*at[2]; + + /* Find the integer cube holding the hit point */ + int_at[0]=LFLOOR(new_at[0]); /* The macro makes this part a lot faster */ + int_at[1]=LFLOOR(new_at[1]); + int_at[2]=LFLOOR(new_at[2]); + + /* A simple way to compute the closest neighbors would be to test all + boundary cubes exhaustively. This is simple with code like: + { + int32_t ii, jj, kk; + for (ii=-1; ii<=1; ii++) for (jj=-1; jj<=1; jj++) for (kk=-1; kk<=1; kk++) + AddSamples(int_at[0]+ii,int_at[1]+jj,int_at[2]+kk, + max_order, new_at, F, delta, ID); + } + But this wastes a lot of time working on cubes which are known to be + too far away to matter! So we can use a more complex testing method + that avoids this needless testing of distant cubes. This doubles the + speed of the algorithm. */ + + /* Test the central cube for closest point(s). 
*/ + AddSamples(int_at[0], int_at[1], int_at[2], max_order, new_at, F, delta, ID); + + /* We test if neighbor cubes are even POSSIBLE contributors by examining the + combinations of the sum of the squared distances from the cube's lower + or upper corners.*/ + x2=new_at[0]-int_at[0]; + y2=new_at[1]-int_at[1]; + z2=new_at[2]-int_at[2]; + mx2=(1.0-x2)*(1.0-x2); + my2=(1.0-y2)*(1.0-y2); + mz2=(1.0-z2)*(1.0-z2); + x2*=x2; + y2*=y2; + z2*=z2; + + /* Test 6 facing neighbors of center cube. These are closest and most + likely to have a close feature point. */ + if (x2<F[max_order-1]) AddSamples(int_at[0]-1, int_at[1] , int_at[2] , + max_order, new_at, F, delta, ID); + if (y2<F[max_order-1]) AddSamples(int_at[0] , int_at[1]-1, int_at[2] , + max_order, new_at, F, delta, ID); + if (z2<F[max_order-1]) AddSamples(int_at[0] , int_at[1] , int_at[2]-1, + max_order, new_at, F, delta, ID); + + if (mx2<F[max_order-1]) AddSamples(int_at[0]+1, int_at[1] , int_at[2] , + max_order, new_at, F, delta, ID); + if (my2<F[max_order-1]) AddSamples(int_at[0] , int_at[1]+1, int_at[2] , + max_order, new_at, F, delta, ID); + if (mz2<F[max_order-1]) AddSamples(int_at[0] , int_at[1] , int_at[2]+1, + max_order, new_at, F, delta, ID); + + /* Test 12 "edge cube" neighbors if necessary. They're next closest. 
*/ + if ( x2+ y2<F[max_order-1]) AddSamples(int_at[0]-1, int_at[1]-1, int_at[2] , + max_order, new_at, F, delta, ID); + if ( x2+ z2<F[max_order-1]) AddSamples(int_at[0]-1, int_at[1] , int_at[2]-1, + max_order, new_at, F, delta, ID); + if ( y2+ z2<F[max_order-1]) AddSamples(int_at[0] , int_at[1]-1, int_at[2]-1, + max_order, new_at, F, delta, ID); + if (mx2+my2<F[max_order-1]) AddSamples(int_at[0]+1, int_at[1]+1, int_at[2] , + max_order, new_at, F, delta, ID); + if (mx2+mz2<F[max_order-1]) AddSamples(int_at[0]+1, int_at[1] , int_at[2]+1, + max_order, new_at, F, delta, ID); + if (my2+mz2<F[max_order-1]) AddSamples(int_at[0] , int_at[1]+1, int_at[2]+1, + max_order, new_at, F, delta, ID); + if ( x2+my2<F[max_order-1]) AddSamples(int_at[0]-1, int_at[1]+1, int_at[2] , + max_order, new_at, F, delta, ID); + if ( x2+mz2<F[max_order-1]) AddSamples(int_at[0]-1, int_at[1] , int_at[2]+1, + max_order, new_at, F, delta, ID); + if ( y2+mz2<F[max_order-1]) AddSamples(int_at[0] , int_at[1]-1, int_at[2]+1, + max_order, new_at, F, delta, ID); + if (mx2+ y2<F[max_order-1]) AddSamples(int_at[0]+1, int_at[1]-1, int_at[2] , + max_order, new_at, F, delta, ID); + if (mx2+ z2<F[max_order-1]) AddSamples(int_at[0]+1, int_at[1] , int_at[2]-1, + max_order, new_at, F, delta, ID); + if (my2+ z2<F[max_order-1]) AddSamples(int_at[0] , int_at[1]+1, int_at[2]-1, + max_order, new_at, F, delta, ID); + + /* Final 8 "corner" cubes */ + if ( x2+ y2+ z2<F[max_order-1]) AddSamples(int_at[0]-1, int_at[1]-1, int_at[2]-1, + max_order, new_at, F, delta, ID); + if ( x2+ y2+mz2<F[max_order-1]) AddSamples(int_at[0]-1, int_at[1]-1, int_at[2]+1, + max_order, new_at, F, delta, ID); + if ( x2+my2+ z2<F[max_order-1]) AddSamples(int_at[0]-1, int_at[1]+1, int_at[2]-1, + max_order, new_at, F, delta, ID); + if ( x2+my2+mz2<F[max_order-1]) AddSamples(int_at[0]-1, int_at[1]+1, int_at[2]+1, + max_order, new_at, F, delta, ID); + if (mx2+ y2+ z2<F[max_order-1]) AddSamples(int_at[0]+1, int_at[1]-1, int_at[2]-1, + max_order, 
new_at, F, delta, ID); + if (mx2+ y2+mz2<F[max_order-1]) AddSamples(int_at[0]+1, int_at[1]-1, int_at[2]+1, + max_order, new_at, F, delta, ID); + if (mx2+my2+ z2<F[max_order-1]) AddSamples(int_at[0]+1, int_at[1]+1, int_at[2]-1, + max_order, new_at, F, delta, ID); + if (mx2+my2+mz2<F[max_order-1]) AddSamples(int_at[0]+1, int_at[1]+1, int_at[2]+1, + max_order, new_at, F, delta, ID); + + /* We're done! Convert everything to right size scale */ + for (i=0; i<max_order; i++) + { + F[i]=sqrt(F[i])*(1.0/DENSITY_ADJUSTMENT); + delta[i][0]*=(1.0/DENSITY_ADJUSTMENT); + delta[i][1]*=(1.0/DENSITY_ADJUSTMENT); + delta[i][2]*=(1.0/DENSITY_ADJUSTMENT); + } + + return; + } + + + + static void AddSamples(int32_t xi, int32_t yi, int32_t zi, int32_t max_order, + double at[3], double *F, + double (*delta)[3], uint32_t *ID) + { + double dx, dy, dz, fx, fy, fz, d2; + int32_t count, i, j, index; + uint32_t seed, this_id; + + /* Each cube has a random number seed based on the cube's ID number. + The seed might be better if it were a nonlinear hash like Perlin uses + for noise but we do very well with this faster simple one. + Our LCG uses Knuth-approved constants for maximal periods. */ + seed=702395077*xi + 915488749*yi + 2120969693*zi; + + /* How many feature points are in this cube? */ + count=Poisson_count[(seed>>24)%256]; /* 256 element lookup table. 
Use MSB */ + + seed=1402024253*seed+586950981; /* churn the seed with good Knuth LCG */ + + for (j=0; j<count; j++) /* test and insert each point into our solution */ + { + this_id=seed; + seed=1402024253*seed+586950981; /* churn */ + + /* compute the 0..1 feature point location's XYZ */ + fx=(seed+0.5)*(1.0/4294967296.0); + seed=1402024253*seed+586950981; /* churn */ + fy=(seed+0.5)*(1.0/4294967296.0); + seed=1402024253*seed+586950981; /* churn */ + fz=(seed+0.5)*(1.0/4294967296.0); + seed=1402024253*seed+586950981; /* churn */ + + /* delta from feature point to sample location */ + dx=xi+fx-at[0]; + dy=yi+fy-at[1]; + dz=zi+fz-at[2]; + + /* Distance computation! Lots of interesting variations are + possible here! + Biased "stretched" A*dx*dx+B*dy*dy+C*dz*dz + Manhattan distance fabs(dx)+fabs(dy)+fabs(dz) + Radial Manhattan: A*fabs(dR)+B*fabs(dTheta)+C*dz +Superquadratic: pow(fabs(dx), A) + pow(fabs(dy), B) + pow(fabs(dz),C) + +Go ahead and make your own! Remember that you must insure that +new distance function causes large deltas in 3D space to map into +large deltas in your distance function, so our 3D search can find +them! [Alternatively, change the search algorithm for your special +cases.] +*/ + + d2=dx*dx+dy*dy+dz*dz; /* Euclidian distance, squared */ + + if (d2<F[max_order-1]) /* Is this point close enough to rememember? */ + { + /* Insert the information into the output arrays if it's close enough. + We use an insertion sort. No need for a binary search to find + the appropriate index.. usually we're dealing with order 2,3,4 so + we can just go through the list. If you were computing order 50 + (wow!!) you could get a speedup with a binary search in the sorted + F[] list. */ + + index=max_order; + while (index>0 && d2<F[index-1]) index--; + + /* We insert this new point into slot # <index> */ + + /* Bump down more distant information to make room for this new point. 
*/ + for (i=max_order-2; i>=index; i--) + { + F[i+1]=F[i]; + ID[i+1]=ID[i]; + delta[i+1][0]=delta[i][0]; + delta[i+1][1]=delta[i][1]; + delta[i+1][2]=delta[i][2]; + } + /* Insert the new point's information into the list. */ + F[index]=d2; + ID[index]=this_id; + delta[index][0]=dx; + delta[index][1]=dy; + delta[index][2]=dz; + } + } + + return; + } + + noise_datum worley(double x, double y, double z) + { + double point[3] = {x,y,z}; + double F[2]; + double delta[2][3]; + uint32_t id[2]; + + Worley(point, 2, F, delta, id); + + noise_datum datum; + datum.first_order = F[0]; + datum.second_order = F[0]; + datum.id = id[0]; + + return datum; + } +} diff --git a/crawl-ref/source/cellular.h b/crawl-ref/source/cellular.h new file mode 100644 index 0000000..3615443 --- /dev/null +++ b/crawl-ref/source/cellular.h @@ -0,0 +1,53 @@ +/* Copyright 1994, 2002 by Steven Worley + This software may be modified and redistributed without restriction + provided this comment header remains intact in the source code. + This code is provided with no warrantee, express or implied, for + any purpose. + + A detailed description and application examples can be found in the + 1996 SIGGRAPH paper "A Cellular Texture Basis Function" and + especially in the 2002 book "Texturing and Modeling, a Procedural + Approach, 3rd edition." There is also extra information on the web + site http://www.worley.com/cellular.html . + + If you do find interesting uses for this tool, and especially if + you enhance it, please drop me an email at steve@worley.com. */ + + + +/* Worley() + + An implementation of the key cellular texturing basis + function. This function is hardwired to return an average F_1 value + of 1.0. It returns the <n> most closest feature point distances + F_1, F_2, .. F_n the vector delta to those points, and a 32 bit + seed for each of the feature points. 
This function is not + difficult to extend to compute alternative information such as + higher order F values, to use the Manhattan distance metric, or + other fun perversions. + + <at> The input sample location. + <max_order> Smaller values compute faster. < 5, read the book to extend it. + <F> The output values of F_1, F_2, ..F[n] in F[0], F[1], F[n-1] + <delta> The output vector difference between the sample point and the n-th + closest feature point. Thus, the feature point's location is the + hit point minus this value. The DERIVATIVE of F is the unit + normalized version of this vector. + <ID> The output 32 bit ID number which labels the feature point. This + is useful for domain partitions, especially for coloring flagstone + patterns. + + This implementation is tuned for speed in a way that any order > 5 + will likely have discontinuous artifacts in its computation of F5+. + This can be fixed by increasing the internal points-per-cube + density in the source code, at the expense of slower + computation. The book lists the details of this tuning. 
*/ +namespace worley { +struct noise_datum { + double first_order; + double second_order; + uint32_t id; +}; + +noise_datum worley(double x, double y, double z); +} diff --git a/crawl-ref/source/dungeon.cc b/crawl-ref/source/dungeon.cc index 52cec94..0bd3786 100644 --- a/crawl-ref/source/dungeon.cc +++ b/crawl-ref/source/dungeon.cc @@ -2360,7 +2360,7 @@ static bool _builder_by_type(int level_number, level_area_type level_type) _portal_vault_level(level_number); else if (level_type == LEVEL_LABYRINTH) _labyrinth_level(level_number); - else if (level_type == LEVEL_ABYSS) + else if (level_type == LEVEL_ABYSS) generate_abyss(); else if (level_type == LEVEL_PANDEMONIUM) _pan_level(level_number); diff --git a/crawl-ref/source/effects.cc b/crawl-ref/source/effects.cc index ffa8318..22d3617 100644 --- a/crawl-ref/source/effects.cc +++ b/crawl-ref/source/effects.cc @@ -18,6 +18,7 @@ #include "externs.h" #include "options.h" +#include "abyss.h" #include "areas.h" #include "artefact.h" #include "beam.h" @@ -675,6 +676,7 @@ void banished(dungeon_feature_type gate_type, const std::string &who) take_note(Note(NOTE_MESSAGE, 0, 0, what.c_str()), true); } + push_features_to_abyss(); down_stairs(gate_type, you.entry_cause); // heh heh } diff --git a/crawl-ref/source/makefile.obj b/crawl-ref/source/makefile.obj index 6fdda10..6ebf486 100644 --- a/crawl-ref/source/makefile.obj +++ b/crawl-ref/source/makefile.obj @@ -14,6 +14,7 @@ beam.o \ behold.o \ bitary.o \ branch.o \ +cellular.o \ chardump.o \ cio.o \ cloud.o \ -- 1.7.3.2 0001-Abyssal-Knights-are-better-not-in-stone.patch [^] (814 bytes) 2011-05-24 23:55 [Show Content] [Hide Content] From dbbeeca6a4e82d6d567f939b48398052c90d6b05 Mon Sep 17 00:00:00 2001 From: Brendan Hickey <brendan@bhickey.net> Date: Tue, 24 May 2011 22:54:01 +0100 Subject: [PATCH] Abyssal Knights are better not in stone. 
--- crawl-ref/source/startup.cc | 2 ++ 1 files changed, 2 insertions(+), 0 deletions(-) diff --git a/crawl-ref/source/startup.cc b/crawl-ref/source/startup.cc index 8e7fe2c..d17030b 100644 --- a/crawl-ref/source/startup.cc +++ b/crawl-ref/source/startup.cc @@ -5,6 +5,7 @@ #include "AppHdr.h" +#include "abyss.h" #include "arena.h" #include "cio.h" #include "command.h" @@ -225,6 +226,7 @@ static void _post_init(bool newc) { // Randomise colours properly for the Abyss. init_pandemonium(); + generate_abyss(); } #ifdef DEBUG_DIAGNOSTICS -- 1.7.3.2 |
||||
|
|||||
Issue History | |||||
Date Modified | Username | Field | Change | ||
2011-05-08 01:04 | brendan | New Issue | |||
2011-05-08 01:04 | brendan | File Added: worley-abyss.patch | |||
2011-05-23 21:11 | brendan | Status | new => feedback | ||
2011-05-23 21:11 | brendan | Description Updated | |||
2011-05-23 21:12 | brendan | File Added: 0001-A-worley-noise-abyss.patch | |||
2011-05-24 23:55 | brendan | File Added: 0001-Abyssal-Knights-are-better-not-in-stone.patch | |||
2011-10-13 18:57 | SamB | Note Added: 0015212 | |||
2011-10-13 18:57 | SamB | Status | feedback => resolved | ||
2011-10-13 18:57 | SamB | Fixed in Branch | => 0.10 development branch | ||
2011-10-13 18:57 | SamB | Resolution | open => done | ||
2011-10-13 18:57 | SamB | Assigned To | => SamB | ||
2011-10-13 18:57 | SamB | Status | resolved => closed |
Notes | |||||
|
|||||
|
|