Commit

fix: not everything needs -addalpha
blacha committed Dec 17, 2019
1 parent 579e8cc commit 223256d
Showing 5 changed files with 105 additions and 37 deletions.
48 changes: 39 additions & 9 deletions packages/cog/src/builder.ts
@@ -7,13 +7,21 @@ import pLimit, { Limit } from 'p-limit';
import { TileCover } from './cover';
import { getProjection } from './proj';

export interface CogBuilderMetadata {
/** Bounding boxes for all polygons */
bounds: GeoJSON.FeatureCollection;
export interface CogBuilderMetadata extends CogBuilderBounds {
/** Quadkey indexes for the covering tiles */
covering: string[];
/** Lowest quality resolution for images */
}

export interface CogBuilderBounds {
/** Number of imagery bands, generally RGB (3) or RGBA (4) */
bands: number;
/** Bounding box for polygons */
bounds: GeoJSON.FeatureCollection;
/** Lowest quality resolution of image */
resolution: number | -1;

/** EPSG projection number */
projection: number;
}

const proj256 = new Projection(256);
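
For orientation, a value of the new CogBuilderBounds shape might look roughly like the sketch below. The field values are purely illustrative, and treating resolution as a zoom-like level is an assumption inferred from how it is clamped against maxTileZoom later in this commit.

```typescript
// Hypothetical CogBuilderBounds value (illustrative only)
const exampleBounds: CogBuilderBounds = {
    bands: 4, // RGBA source imagery
    bounds: { type: 'FeatureCollection', features: [] }, // footprints of every source tiff
    resolution: 13, // coarsest source resolution, assumed to be a zoom-like level
    projection: 3857, // EPSG code shared by all source tiffs
};
```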
@@ -36,29 +44,46 @@ export class CogBuilder {
* Get the source bounds for a collection of tiffs
* @param tiffs
*/
async bounds(tiffs: string[]): Promise<{ bounds: GeoJSON.FeatureCollection; resolution: number }> {
async bounds(tiffs: string[]): Promise<CogBuilderBounds> {
let resolution = -1;
let bandCount = -1;
let projection = -1;
const coordinates = tiffs.map(tiffPath => {
return this.q(async () => {
const source = CogBuilder.createTiffSource(tiffPath);
const tiff = new CogTiff(source);
await tiff.init();

const image = tiff.getImage(0);
const tiffRes = await this.getTiffResolution(tiff);
if (tiffRes > resolution) {
resolution = tiffRes;
}
const tiffBandCount = image.value(TiffTag.BitsPerSample) as number[] | null;
if (tiffBandCount != null && tiffBandCount.length > bandCount) {
bandCount = tiffBandCount.length;
}

const output = await this.getTifBounds(tiff);
if (CogSourceFile.isSource(source)) {
await source.close();
}

const imageProjection = image.geoTiffTag(TiffTagGeo.ProjectedCSTypeGeoKey) as number;
if (imageProjection != null && imageProjection != projection) {
if (projection != -1) {
throw new Error('Multiple projections');
}
projection = imageProjection;
}

return output;
});
});

const polygons = await Promise.all(coordinates);
return {
projection,
bands: bandCount,
bounds: GeoJson.toFeatureCollection(polygons),
resolution,
};
@@ -123,9 +148,14 @@ export class CogBuilder {
* @returns List of QuadKey indexes for
*/
async build(tiffs: string[]): Promise<CogBuilderMetadata> {
const { bounds, resolution } = await this.bounds(tiffs);
const covering = TileCover.cover(bounds, 1, Math.min(this.maxTileZoom, resolution - 2), this.maxTileCount);
return { bounds, resolution, covering };
const metadata = await this.bounds(tiffs);
const covering = TileCover.cover(
metadata.bounds,
1,
Math.min(this.maxTileZoom, metadata.resolution - 2),
this.maxTileCount,
);
return { ...metadata, covering };
}
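
As a quick worked example of the Math.min(this.maxTileZoom, metadata.resolution - 2) clamp above (assuming resolution behaves like a zoom level; the numbers are hypothetical):

```typescript
// With new CogBuilder(5, maxTiles) and a coarsest source resolution of 13:
Math.min(5, 13 - 2); // => 5, covering is capped at maxTileZoom
// With very coarse imagery (resolution 4), the covering zoom drops instead:
Math.min(5, 4 - 2); // => 2
```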

static createTiffSource(tiff: string): CogSource {
77 changes: 55 additions & 22 deletions packages/cog/src/cli/cli.ts
@@ -1,5 +1,5 @@
#!/usr/bin/env node
import { LogConfig, LogType, GeoJson, Projection, EPSG } from '@basemaps/shared';
import { LogConfig, LogType, GeoJson, Projection, EPSG, Env } from '@basemaps/shared';
import * as fs from 'fs';
import * as Mercator from 'global-mercator';
import pLimit from 'p-limit';
@@ -8,39 +8,60 @@ import 'source-map-support/register';
import { CogBuilder, CogBuilderMetadata } from '../builder';
import { GdalCogBuilder } from '../gdal';
import { GdalDocker } from '../gdal.docker';
import { createHash } from 'crypto';

// At most TiffConcurrency will be built at one time.
const LoadingQueue = pLimit(parseInt(process.env[Env.TiffConcurrency] ?? '4'));

const isDryRun = (): boolean => process.argv.indexOf('--commit') == -1;

async function buildVrt(filePath: string, tiffFiles: string[], logger: LogType): Promise<string> {
const vrtPath = path.join(filePath, '.vrt');
const vrtWarpedPath = path.join(filePath, `.epsg${EPSG.Google}.vrt`);
interface VrtOptions {
/** Vrts will add a second alpha layer if one already exists, so don't always add one */
addAlpha: boolean;
/** No need to force a reprojection to 3857 if source imagery is in 3857 */
forceEpsg3857: boolean;
}

async function buildVrt(filePath: string, tiffFiles: string[], options: VrtOptions, logger: LogType): Promise<string> {
// Create a somewhat unique name for the vrts
const vrtName = createHash('sha256')
.update(tiffFiles.join(''))
.digest('hex');

const vrtPath = path.join(filePath, `.__${vrtName}.vrt`);
const vrtWarpedPath = path.join(filePath, `.__${vrtName}.epsg${EPSG.Google}.vrt`);

if (fs.existsSync(vrtPath)) {
fs.unlinkSync(vrtPath);
}
const gdalDocker = new GdalDocker(filePath);

// TODO -addalpha adds a 2nd alpha layer if one exists
logger.info({ path: vrtPath }, 'BuildVrt');
if (isDryRun()) {
return vrtWarpedPath;
}
await gdalDocker.run(['gdalbuildvrt', '-addalpha', '-hidenodata', vrtPath, ...tiffFiles]);

// -addalpha adds a 2nd alpha layer if one exists
const buildVrtCmd = ['gdalbuildvrt', '-hidenodata'];
if (options.addAlpha) {
buildVrtCmd.push('-addalpha');
}
await gdalDocker.run([...buildVrtCmd, vrtPath, ...tiffFiles], logger);

/** Force a reprojection to 3857 if required */
if (!options.forceEpsg3857) {
return vrtPath;
}

if (fs.existsSync(vrtWarpedPath)) {
fs.unlinkSync(vrtWarpedPath);
}

logger.info({ path: vrtWarpedPath }, 'BuildVrtWarped');
await gdalDocker.run([
'gdalwarp',
'-of',
'VRT',
'-t_srs',
Projection.toEpsgString(EPSG.Google),
vrtPath,
vrtWarpedPath,
]);
logger.info({ path: vrtWarpedPath }, 'BuildVrt:Warped');
await gdalDocker.run(
['gdalwarp', '-of', 'VRT', '-t_srs', Projection.toEpsgString(EPSG.Google), vrtPath, vrtWarpedPath],
logger,
);
return vrtWarpedPath;
}
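
A minimal usage sketch of buildVrt with the new VrtOptions, along with the approximate GDAL commands it runs inside Docker; the directory, file names, and hash are placeholders, and the EPSG:3857 string assumes Projection.toEpsgString(EPSG.Google) resolves to that code.

```typescript
// Hypothetical call (paths are illustrative)
const vrt = await buildVrt('/data/imagery', tiffFiles, { addAlpha: true, forceEpsg3857: true }, logger);
// Roughly equivalent to running:
//   gdalbuildvrt -hidenodata -addalpha /data/imagery/.__<sha256>.vrt <tiffFiles...>
//   gdalwarp -of VRT -t_srs EPSG:3857 /data/imagery/.__<sha256>.vrt /data/imagery/.__<sha256>.epsg3857.vrt
// With { addAlpha: false, forceEpsg3857: false } only gdalbuildvrt runs, without -addalpha,
// and the unwarped vrt path is returned instead.
```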

@@ -81,7 +102,7 @@ async function processQuadKey(
);

if (!isDryRun()) {
await gdal.convert();
await gdal.convert(logger.child({ quadKey }));
}
}

@@ -100,8 +121,6 @@ export async function main(): Promise<void> {
logger.warn('Commit');
}

const Q = pLimit(4);

const filePath = process.argv[2];
const maxTiles = parseInt(process.argv[3] ?? '50', 10);
const files = fs
@@ -110,10 +129,24 @@
.map((f: string): string => path.join(filePath, f));

const builder = new CogBuilder(5, maxTiles);
logger.info({ fileCount: files.length }, 'BoundingBox');
logger.info({ tiffCount: files.length }, 'CreateBoundingBox');
const metadata = await builder.build(files);

const inputVrt = await buildVrt(filePath, files, logger);
const vrtOptions = { addAlpha: true, forceEpsg3857: true };

// -addalpha on the vrt adds an extra alpha layer even if one already exists
if (metadata.bands > 3) {
logger.warn({ bandCount: metadata.bands }, 'Vrt:DetectedAlpha, Disabling -addalpha');
vrtOptions.addAlpha = false;
}

// If the source imagery is already in EPSG:3857 (Google), no need to force a warp
if (metadata.projection == EPSG.Google) {
logger.warn({ projection: metadata.projection }, 'Vrt:GoogleProjection, Disabling warp');
vrtOptions.forceEpsg3857 = false;
}

const inputVrt = await buildVrt(filePath, files, vrtOptions, logger);

const outputPath = path.join(filePath, 'cog');
if (!fs.existsSync(outputPath)) {
@@ -137,7 +170,7 @@ export async function main(): Promise<void> {
logger.info({ count: coveringBounds.length, indexes: metadata.covering.join(', ') }, 'Covered');

const todo = metadata.covering.map((quadKey: string, index: number) => {
return Q(async () => {
return LoadingQueue(async () => {
const startTime = Date.now();
logger.info({ quadKey, index }, 'Start');
await processQuadKey(quadKey, inputVrt, outputPath, metadata, index, logger);
6 changes: 4 additions & 2 deletions packages/cog/src/cover.ts
@@ -9,7 +9,7 @@ export class TileCover {
* @param maxZoom Highest zoom level of tile to use
* @param maxTiles Max number of tiles to be a "valid" covering
*/
static cover(featureCollection: GeoJSON.FeatureCollection, minZoom = 1, maxZoom = 13, maxTiles = 25): string[] {
static cover(featureCollection: GeoJSON.FeatureCollection, minZoom = 2, maxZoom = 13, maxTiles = 25): string[] {
/* eslint-disable @typescript-eslint/camelcase */
const limits = { min_zoom: minZoom, max_zoom: maxZoom };

@@ -30,7 +30,9 @@
}
}
return true;
});
})
// make the output go from 0 -> 3
.sort();

if (indexes.length < maxTiles) {
return indexes;
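To illustrate the sort added above, which orders the quadkeys lexicographically so the output runs from 0 -> 3 and sibling tiles group together (the keys here are hypothetical):

```typescript
const covering = ['31', '2', '0', '30', '1'];
covering.sort();
// => ['0', '1', '2', '30', '31']
```
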
9 changes: 6 additions & 3 deletions packages/cog/src/gdal.docker.ts
@@ -47,7 +47,7 @@ export class GdalDocker {
];
}

run(args: string[], log?: LogType): Promise<void> {
run(args: string[], log: LogType): Promise<void> {
if (this.promise != null) {
return this.promise;
}
@@ -64,14 +64,17 @@
this.promise = new Promise((resolve, reject) => {
child.on('exit', (code: number) => {
if (code != 0) {
log?.error({ code, log: errorBuff.join('').trim() }, 'FailedToConvert');
log.error({ code, log: errorBuff.join('').trim() }, 'FailedToConvert');
return reject(new Error('Failed to execute GDAL: ' + errorBuff.join('').trim()));
}
if (errorBuff.length > 0) {
log.warn({ log: errorBuff.join('').trim() }, 'CogWarnings');
}
this.promise = null;
return resolve();
});
child.on('error', (error: Error) => {
log?.error({ error, log: errorBuff.join('').trim() }, 'FailedToConvert');
log.error({ error, log: errorBuff.join('').trim() }, 'FailedToConvert');
this.promise = null;
reject(error);
});
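Since run() now requires a logger, a caller looks roughly like the sketch below; the working directory and GDAL arguments are illustrative only.

```typescript
const gdalDocker = new GdalDocker('/data/imagery');
await gdalDocker.run(['gdalinfo', '--version'], logger);
// Non-zero exit code: log.error(..., 'FailedToConvert') and the promise rejects.
// Zero exit code with stderr output: log.warn(..., 'CogWarnings') and the promise still resolves.
```
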
2 changes: 1 addition & 1 deletion packages/cog/src/gdal.ts
@@ -109,7 +109,7 @@ export class GdalCogBuilder {
];
}

convert(log?: LogType): Promise<void> {
convert(log: LogType): Promise<void> {
return this.gdal.run(this.args, log);
}
}
