[feat] support for table plugin in demo examples and privately stored datasets #2923

Merged: 5 commits, Jan 21, 2025

8 changes: 8 additions & 0 deletions examples/demo-app/src/actions.js
@@ -21,6 +21,7 @@ import {
// CONSTANTS
export const INIT = 'INIT';
export const LOAD_REMOTE_RESOURCE_SUCCESS = 'LOAD_REMOTE_RESOURCE_SUCCESS';
export const LOAD_REMOTE_DATASET_PROCESSED_SUCCESS = 'LOAD_REMOTE_DATASET_PROCESSED_SUCCESS';
export const LOAD_REMOTE_RESOURCE_ERROR = 'LOAD_REMOTE_RESOURCE_ERROR';
export const LOAD_MAP_SAMPLE_FILE = 'LOAD_MAP_SAMPLE_FILE';
export const SET_SAMPLE_LOADING_STATUS = 'SET_SAMPLE_LOADING_STATUS';
@@ -46,6 +47,13 @@ export function loadRemoteResourceSuccess(response, config, options, remoteDatasetConfig) {
};
}

export function loadRemoteDatasetProcessedSuccessAction(result) {
return {
type: LOAD_REMOTE_DATASET_PROCESSED_SUCCESS,
payload: result
};
}

export function loadRemoteResourceError(error, url) {
return {
type: LOAD_REMOTE_RESOURCE_ERROR,
106 changes: 80 additions & 26 deletions examples/demo-app/src/reducers/index.js
@@ -3,19 +3,24 @@

import {combineReducers} from 'redux';
import {handleActions} from 'redux-actions';
import Task, {withTask} from 'react-palm/tasks';

import {aiAssistantReducer} from '@kepler.gl/ai-assistant';
import {EXPORT_MAP_FORMATS} from '@kepler.gl/constants';
import {processGeojson, processRowObject, processArrowTable} from '@kepler.gl/processors';
import keplerGlReducer, {combinedUpdaters, uiStateUpdaters} from '@kepler.gl/reducers';
import KeplerGlSchema from '@kepler.gl/schemas';
import {KeplerTable} from '@kepler.gl/table';
import {getApplicationConfig} from '@kepler.gl/utils';

import {
INIT,
LOAD_MAP_SAMPLE_FILE,
LOAD_REMOTE_RESOURCE_SUCCESS,
LOAD_REMOTE_DATASET_PROCESSED_SUCCESS,
LOAD_REMOTE_RESOURCE_ERROR,
SET_SAMPLE_LOADING_STATUS,
loadRemoteDatasetProcessedSuccessAction
} from '../actions';

import {CLOUD_PROVIDERS_CONFIGURATION} from '../constants/default-settings';
Expand Down Expand Up @@ -81,9 +86,35 @@ const demoReducer = combineReducers({
aiAssistant: aiAssistantReducer
});

async function loadRemoteResourceSuccessTask({
dataUrl,
datasetId,
processorMethod,
remoteDatasetConfig,
unprocessedData
}) {
if (dataUrl) {
const data = await processorMethod(unprocessedData);
return {
info: {
id: datasetId
},
data
};
}

// remote datasets like vector tile datasets
return remoteDatasetConfig;
}

const LOAD_REMOTE_RESOURCE_SUCCESS_TASK = Task.fromPromise(
loadRemoteResourceSuccessTask,
'LOAD_REMOTE_RESOURCE_SUCCESS_TASK'
);
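
Task.fromPromise keeps the async processor out of the reducer itself: the updater only declares the work, and react-palm's middleware runs it and feeds the result back in as a plain action. A minimal, self-contained sketch of the same pattern (the store, action names, and doubling function are illustrative, not part of this PR):

import Task, {withTask, taskMiddleware} from 'react-palm/tasks';
import {applyMiddleware, createStore} from 'redux';

// An async function wrapped as a task creator, just like
// LOAD_REMOTE_RESOURCE_SUCCESS_TASK above.
const SLOW_DOUBLE_TASK = Task.fromPromise(async (n: number) => n * 2, 'SLOW_DOUBLE_TASK');

const reducer = (state = {value: 0}, action: any) => {
  switch (action.type) {
    case 'DOUBLE_REQUESTED': {
      // Declare the work; bimap turns the eventual result (or error)
      // into a plain follow-up action, mirroring the updater below.
      const task = SLOW_DOUBLE_TASK(action.value).bimap(
        result => ({type: 'DOUBLE_FINISHED', value: result}),
        error => ({type: 'DOUBLE_FAILED', error})
      );
      return withTask(state, task);
    }
    case 'DOUBLE_FINISHED':
      return {...state, value: action.value};
    default:
      return state;
  }
};

// taskMiddleware executes tasks attached via withTask and dispatches
// the actions produced by bimap.
const store = createStore(reducer, applyMiddleware(taskMiddleware));
store.dispatch({type: 'DOUBLE_REQUESTED', value: 21});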

// this can be moved into an action that calls a kepler.gl action
/**
*
* Used to load Kepler.gl demo examples
* @param state
* @param action {map: resultset, config, map}
* @returns {{app: {isMapLoading: boolean}, keplerGl: {map: (state|*)}}}
@@ -96,40 +127,62 @@ export const loadRemoteResourceSuccess = (state, action) => {
const {shape} = dataUrl ? action.response : {};
let processorMethod = processRowObject;
let unprocessedData = action.response;
unprocessedData = shape === 'object-row-table' ? action.response.data : unprocessedData;

if (dataUrl) {
const table = getApplicationConfig().table ?? KeplerTable;
if (typeof table.getFileProcessor === 'function') {
if (shape === 'arrow-table') {
// arrow processor from table plugin expects batches
unprocessedData = action.response.data.batches;
}
// use custom processors from table class
const processorResult = table.getFileProcessor(unprocessedData);
// TODO save processorResult.format here with the dataset
processorMethod = processorResult.processor;
} else {
if (shape === 'arrow-table') {
processorMethod = processArrowTable;
} else if (shape === 'object-row-table') {
processorMethod = processRowObject;
} else if (dataUrl.includes('.json') || dataUrl.includes('.geojson')) {
processorMethod = processGeojson;
} else {
throw new Error('Failed to select data processor');
}
}
}

// processorMethod can be async so create a task
const task = LOAD_REMOTE_RESOURCE_SUCCESS_TASK({
dataUrl,
datasetId,
processorMethod,
remoteDatasetConfig: action.remoteDatasetConfig,
unprocessedData
}).bimap(
datasets => loadRemoteDatasetProcessedSuccessAction({...action, datasets}),
() => {
throw new Error('loadRemoteResource data processor failed');
}
);

return withTask(state, task);
};
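
The getFileProcessor contract used above is all a table plugin has to implement: a static method that inspects the raw input and returns {processor, format}. A hypothetical plugin table sketching that contract (the class name, format strings, and parser helpers are assumptions for illustration, not APIs defined by this PR):

import {KeplerTable} from '@kepler.gl/table';

// Hypothetical plugin table: when registered so that
// getApplicationConfig().table returns this class, the updater above
// routes all parsing through its static getFileProcessor.
class PluginTable extends KeplerTable {
  static getFileProcessor(data: unknown, format?: string) {
    if (data && typeof data === 'object' && 'batches' in (data as object)) {
      // arrow input arrives as record batches (see the updater above)
      return {format: 'arrow', processor: async (d: unknown) => parseArrowBatches(d)};
    }
    // default: treat input as row objects; processors may be async,
    // which is why the reducer wraps them in a task
    return {format: 'row-object', processor: async (d: unknown) => parseRows(d)};
  }
}

// stand-ins for a plugin's real parsing logic
declare function parseArrowBatches(data: unknown): Promise<any>;
declare function parseRows(data: unknown): Promise<any>;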

const loadRemoteDatasetProcessedSuccess = (state, action) => {
const {config, datasets, options} = action.payload;

const parsedConfig = config ? KeplerGlSchema.parseSavedConfig(config) : null;

const keplerGlInstance = combinedUpdaters.addDataToMapUpdater(
state.keplerGl.map, // "map" is the id of your kepler.gl instance
{
payload: {
datasets,
config: parsedConfig,
options: {
centerMap: Boolean(!config)
}
}
}
@@ -139,7 +192,7 @@
...state,
app: {
...state.app,
currentSample: options,
isMapLoading: false // we turn off the spinner
},
keplerGl: {
@@ -177,6 +230,7 @@ export const loadRemoteResourceError = (state, action) => {

const composedUpdaters = {
[LOAD_REMOTE_RESOURCE_SUCCESS]: loadRemoteResourceSuccess,
[LOAD_REMOTE_DATASET_PROCESSED_SUCCESS]: loadRemoteDatasetProcessedSuccess,
[LOAD_REMOTE_RESOURCE_ERROR]: loadRemoteResourceError
};
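
For context, these updaters only run if matching actions are routed to composedUpdaters ahead of the combined reducer; in the demo app that wiring looks roughly like this sketch (reconstructed, not part of this diff):

// Route actions that have a dedicated updater through composedUpdaters
// first, then fall back to the combined demoReducer from above.
const composedReducer = (state, action) => {
  if (composedUpdaters[action.type]) {
    return composedUpdaters[action.type](state, action);
  }
  return demoReducer(state, action);
};

export default composedReducer;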

15 changes: 14 additions & 1 deletion src/actions/src/provider-actions.ts
@@ -12,7 +12,8 @@ import {
import {Provider} from '@kepler.gl/cloud-providers';

// eslint-disable-next-line prettier/prettier
const assignType = <T>(obj: T): {[K in keyof T]: `${typeof ACTION_PREFIX}${string & K}`} =>
obj as any;
export const ActionTypes = assignType({
EXPORT_FILE_TO_CLOUD: `${ACTION_PREFIX}EXPORT_FILE_TO_CLOUD`,
EXPORT_FILE_SUCCESS: `${ACTION_PREFIX}EXPORT_FILE_SUCCESS`,
@@ -21,6 +22,7 @@ export const ActionTypes = assignType({
POST_SAVE_LOAD_SUCCESS: `${ACTION_PREFIX}POST_SAVE_LOAD_SUCCESS`,
LOAD_CLOUD_MAP: `${ACTION_PREFIX}LOAD_CLOUD_MAP`,
LOAD_CLOUD_MAP_SUCCESS: `${ACTION_PREFIX}LOAD_CLOUD_MAP_SUCCESS`,
LOAD_CLOUD_MAP_SUCCESS_2: `${ACTION_PREFIX}LOAD_CLOUD_MAP_SUCCESS_2`,
LOAD_CLOUD_MAP_ERROR: `${ACTION_PREFIX}LOAD_CLOUD_MAP_ERROR`
});

@@ -110,6 +112,17 @@ export const loadCloudMapSuccess: (p: LoadCloudMapSuccessPayload) => {
payload
}));

/** LOAD_CLOUD_MAP_SUCCESS_2 */
export type LoadCloudMapSuccess2Payload = LoadCloudMapSuccessPayload & {
datasetsPayload: any;
};
export const loadCloudMapSuccess2: (p: LoadCloudMapSuccess2Payload) => {
type: typeof ActionTypes.LOAD_CLOUD_MAP_SUCCESS_2;
payload: LoadCloudMapSuccess2Payload;
} = createAction(ActionTypes.LOAD_CLOUD_MAP_SUCCESS_2, (payload: LoadCloudMapSuccess2Payload) => ({
payload
}));
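
The _2 suffix reflects the new two-step flow: loadCloudMapSuccess now only schedules parsing, and loadCloudMapSuccess2 delivers the parsed datasetsPayload. A sketch of the success path (the function wiring is illustrative; in the reducer this happens through a react-palm task):

import type {Dispatch} from 'redux';

// Step 1: loadCloudMapSuccess is dispatched once the provider has
// downloaded a private map; its updater schedules the async parse task.
// Step 2: the task's success branch reports back with the parsed datasets:
function onMapParsed(dispatch: Dispatch, payload: LoadCloudMapSuccess2Payload) {
  dispatch(loadCloudMapSuccess2(payload));
}
// Step 3: loadCloudMapSuccess2Updater then emits
// addDataToMap(payload.datasetsPayload) to populate the map.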

/** LOAD_CLOUD_MAP_ERROR */
export type LoadCloudMapErrorPayload = {
error: any;
104 changes: 87 additions & 17 deletions src/reducers/src/provider-state-updaters.ts
@@ -1,9 +1,9 @@
// SPDX-License-Identifier: MIT
// Copyright contributors to the kepler.gl project

import Task, {withTask} from 'react-palm/tasks';
import Console from 'global/console';
import {getApplicationConfig, getError, isPlainObject} from '@kepler.gl/utils';
import {generateHashId, toArray} from '@kepler.gl/common-utils';
import {
EXPORT_FILE_TO_CLOUD_TASK,
Expand All @@ -16,6 +16,7 @@ import {
exportFileError,
postSaveLoadSuccess,
loadCloudMapSuccess,
loadCloudMapSuccess2,
loadCloudMapError,
resetProviderStatus,
removeNotification,
@@ -30,10 +31,11 @@ import {
DATASET_FORMATS,
OVERWRITE_MAP_ID
} from '@kepler.gl/constants';
import {AddDataToMapPayload, ExportFileToCloudPayload} from '@kepler.gl/types';

import {FILE_CONFLICT_MSG, MapListItem} from '@kepler.gl/cloud-providers';
import {DATASET_HANDLERS} from '@kepler.gl/processors';
import {KeplerTable} from '@kepler.gl/table';

type ActionPayload<P> = {
type?: string;
@@ -263,6 +265,17 @@ function getDatasetHandler(format) {
return defaultHandler;
}

// use custom processors from table class
const TableClass = getApplicationConfig().table ?? KeplerTable;
if (typeof TableClass.getFileProcessor === 'function') {
const processorResult = TableClass.getFileProcessor(null, format);
if (!processorResult.processor) {
Console.warn(`No processor found for format ${format}, will use csv by default`);
return defaultHandler;
}
return processorResult.processor;
}

if (!DATASET_HANDLERS[format]) {
const supportedFormat = Object.keys(DATASET_FORMATS)
.map(k => `'${k}'`)
@@ -276,19 +289,46 @@
return DATASET_HANDLERS[format];
}
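
Note the second argument: when loading privately stored maps the raw data is not available yet, so getFileProcessor is called with null and must pick a processor from the saved format alone. A sketch of a plugin-side, format-only dispatch (the format keys and parser names are assumptions mirroring DATASET_FORMATS):

type ProcessorResult = {
  format: string;
  processor: ((data: any) => any) | null;
};

// format keys assumed to mirror DATASET_FORMATS ('csv', 'geojson', ...)
const PROCESSOR_BY_FORMAT: Record<string, (data: any) => any> = {
  csv: data => parseCsv(data),
  geojson: data => parseGeojson(data)
};

function getFileProcessor(data: unknown, format?: string): ProcessorResult {
  const processor = (format && PROCESSOR_BY_FORMAT[format]) || null;
  // a null processor makes getDatasetHandler above warn and fall back
  // to the default csv handler
  return {format: format ?? 'unknown', processor};
}

// stand-ins for a plugin's real parsers
declare function parseCsv(data: any): any;
declare function parseGeojson(data: any): any;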

/**
 * A task to handle an async processorMethod
 * @param param0 response, loadParams and provider from LOAD_CLOUD_MAP_SUCCESS
 * @returns addDataToMap payload built from the parsed datasets
 */
async function parseLoadMapResponseTask({
response,
loadParams,
provider
}: {
response: ProviderActions.LoadCloudMapSuccessPayload['response'];
loadParams: ProviderActions.LoadCloudMapSuccessPayload['loadParams'];
provider: ProviderActions.LoadCloudMapSuccessPayload['provider'];
}) {
const {map, format} = response;
const processorMethod = getDatasetHandler(format);

let parsedDatasets: AddDataToMapPayload['datasets'] = [];

if (
format === DATASET_FORMATS.keplergl &&
processorMethod !== DATASET_HANDLERS[DATASET_FORMATS.keplergl]
) {
// plugin table provides processor for keplergl map, not single dataset with allData
const parsedMap = await processorMethod(map);
parsedDatasets = parsedMap.datasets;
} else {
const datasets = toArray(map.datasets);
parsedDatasets = await Promise.all(
datasets.map(async ds => {
if (format === DATASET_FORMATS.keplergl) {
// no need to obtain id, directly pass them in
return await processorMethod(ds);
}
const info = (ds && ds.info) || {id: generateHashId(6)};
const data = await processorMethod(ds.data || ds);
return {info, data};
})
);
}

const info = {
...map.info,
@@ -302,11 +342,19 @@
};
}

const PARSE_LOAD_MAP_RESPONSE_TASK = Task.fromPromise(
parseLoadMapResponseTask,
'PARSE_LOAD_MAP_RESPONSE_TASK'
);

/**
* Used to load resources stored in private storage.
*/
export const loadCloudMapSuccessUpdater = (
state: ProviderState,
action: ActionPayload<ProviderActions.LoadCloudMapSuccessPayload>
): ProviderState => {
const {response, loadParams, provider, onError} = action.payload;

const formatError = checkLoadMapResponseError(response);
if (formatError) {
Expand All @@ -316,6 +364,30 @@ export const loadCloudMapSuccessUpdater = (
});
}

// processorMethod can be async so create a task
const parseLoadMapResponseTask = PARSE_LOAD_MAP_RESPONSE_TASK({
response,
loadParams,
provider
}).bimap(
(datasetsPayload: AddDataToMapPayload) => {
return loadCloudMapSuccess2({...action.payload, datasetsPayload});
},
error =>
exportFileErrorUpdater(state, {
payload: {error, provider, onError}
})
);

return withTask(state, parseLoadMapResponseTask);
};

export const loadCloudMapSuccess2Updater = (
state: ProviderState,
action: ActionPayload<ProviderActions.LoadCloudMapSuccess2Payload>
): ProviderState => {
const {datasetsPayload, response, loadParams, provider, onSuccess} = action.payload;

const newState = {
...state,
mapSaved: provider.name,
Expand All @@ -324,10 +396,8 @@ export const loadCloudMapSuccessUpdater = (
isProviderLoading: false
};

const tasks = [
ACTION_TASK().map(() => addDataToMap(datasetsPayload)),
createActionTask(onSuccess, {response, loadParams, provider}),
ACTION_TASK().map(() => postSaveLoadSuccess(`Map from ${provider.name} loaded`))
].filter(d => d);