You can’t export records from the Algolia dashboard, only your index configuration. To export records, you must use an API client.
Exporting the index using an API client
Sometimes you may need to export your Algolia index to use the data in some other way. To do this, use the browse method with one of the API clients. The browse method lets you retrieve records beyond the default 1,000-record limit of the search method.
Use an empty query to retrieve all records. Once you have the records, save them to a file.
PHP

// composer autoload
require __DIR__ . '/vendor/autoload.php';

// if you are not using composer
// require_once 'path/to/algoliasearch.php';

$client = Algolia\AlgoliaSearch\SearchClient::create('YourApplicationID', 'YourAdminAPIKey');
$index = $client->initIndex('your_index_name');

$objects = [];
foreach ($index->browseObjects() as $hit) {
    $objects[] = $hit;
}

file_put_contents('your_filename', json_encode($objects));
Ruby

require 'json'
require 'algolia'

client = Algolia::Search::Client.create('YourApplicationID', 'YourAdminAPIKey')
index = client.init_index('your_index_name')

hits = []
index.browse_objects({query: ''}) do |hit|
  hits.push(hit)
end

File.write('your_filename', hits.to_json)
JavaScript

// Only in Node.js
const fs = require('fs');
const algoliasearch = require('algoliasearch');

const client = algoliasearch('YourApplicationID', 'YourAdminAPIKey');
const index = client.initIndex('your_index_name');

let hits = [];

index
  .browseObjects({
    batch: (objects) => (hits = hits.concat(objects)),
  })
  .then(() => {
    console.log('Finished! We got %d hits', hits.length);
    fs.writeFile(
      'browse.json',
      JSON.stringify(hits, null, 2),
      'utf-8',
      (err) => {
        if (err) throw err;
        console.log('Your index was successfully exported!');
      }
    );
  });
Python

import json
from algoliasearch.search_client import SearchClient

client = SearchClient.create('YourApplicationID', 'YourAdminAPIKey')
index = client.init_index('your_index_name')

hits = []
for hit in index.browse_objects({'query': ''}):
    hits.append(hit)

with open('your_filename', 'w') as f:
    json.dump(hits, f)
Swift

index.browseObjects { result in
  if case .success(let response) = result {
    let records: [JSON] = response.flatMap { $0.hits.map(\.object) }
    do {
      let recordsData = try JSONEncoder().encode(records)
      // Replace the empty path with the destination file for your export
      try recordsData.write(to: .init(fileURLWithPath: ""), options: .atomic)
    } catch let error {
      print(error)
    }
  }
}
C#

SearchClient client = new SearchClient("YourApplicationID", "YourAdminAPIKey");
SearchIndex index = client.InitIndex("your_index_name");

var result = index.Browse<JObject>(new BrowseIndexQuery());

JArray records = new JArray();
foreach (var hit in result)
{
    records.Add(hit);
}

using (StreamWriter file = File.CreateText(@"your_filename.json"))
{
    JsonSerializer serializer = new JsonSerializer();
    serializer.Serialize(file, records);
}
Java

SearchClient client =
    DefaultSearchClient.create("YourApplicationID", "YourAdminAPIKey");
SearchIndex<Actor> index = client.initIndex("actors", Actor.class);

IndexIterable<Actor> results = index.browseObjects(new BrowseIndexQuery());

List<Actor> records = new ArrayList<>();
results.forEach(records::add);

ObjectMapper objectMapper = Defaults.getObjectMapper();
objectMapper.writeValue(new File("your_filename.json"), records);
Go

package main

import (
    "encoding/json"
    "io"
    "io/ioutil"
    "os"

    "github.com/algolia/algoliasearch-client-go/v3/algolia/search"
)

type Actor struct {
    Name            string `json:"name"`
    Rating          int    `json:"rating"`
    ImagePath       string `json:"image_path"`
    AlternativeName string `json:"alternative_name"`
    ObjectID        string `json:"objectID"`
}

func main() {
    client := search.NewClient("YourApplicationID", "YourAdminAPIKey")
    index := client.InitIndex("actors")

    it, err := index.BrowseObjects()
    if err != nil {
        // error handling
    }

    var actors []Actor
    var actor Actor

    for {
        _, err = it.Next(&actor)
        if err != nil {
            if err == io.EOF {
                break
            }
            // error handling
        }
        actors = append(actors, actor)
    }

    data, err := json.Marshal(actors)
    if err != nil {
        // error handling
    }

    err = ioutil.WriteFile("actors_downloaded.json", data, os.ModePerm)
    if err != nil {
        // error handling
    }
}
Scala

import java.io.{File, PrintWriter}

import algolia.AlgoliaDsl._
import algolia.objects.Query
import algolia.responses.ObjectID
import algolia.{AlgoliaClient, AlgoliaSyncHelper}
import org.json4s.native.Serialization.write

import scala.concurrent.{ExecutionContext, ExecutionContextExecutor}
import scala.concurrent.duration._

case class MyCaseClass(objectID: String /* , other attributes */) extends ObjectID

object Main {
  def main(args: Array[String]): Unit = {
    implicit val ec: ExecutionContextExecutor = ExecutionContext.global
    implicit val awaitDuration: FiniteDuration = 10 seconds

    val client = new AlgoliaClient("YourApplicationID", "YourAdminAPIKey")
    val indexName = "your_index_name"
    val helper = AlgoliaSyncHelper(client)

    val records: Seq[MyCaseClass] = helper
      .browse[MyCaseClass](indexName, Query(query = Some("")))
      .flatten
      .toSeq

    val w = new PrintWriter(new File("your_filename.json"))
    w.write(write(records))
    w.close()
  }
}
Kotlin

val records = index.browseObjects().flatMap { response ->
    response.hits.map { it.json }
}

val json = Json.stringify(JsonObjectSerializer.list, records)
File("your_filename.json").writeText(json)
When exporting large indices, batch the records for optimal performance.
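For example, rather than collecting the whole index in memory and writing one large file at the end, you could flush records to disk in fixed-size batches as you browse. The following is a minimal sketch using the Python client; the batch size, output file name, and JSON Lines format are illustrative choices, not requirements of the API.

import json
from algoliasearch.search_client import SearchClient

client = SearchClient.create('YourApplicationID', 'YourAdminAPIKey')
index = client.init_index('your_index_name')

BATCH_SIZE = 1000  # illustrative batch size
batch = []

# Write one record per line (JSON Lines) so the full index never
# has to be held in memory at once.
with open('your_filename.jsonl', 'w') as f:
    for hit in index.browse_objects({'query': ''}):
        batch.append(hit)
        if len(batch) == BATCH_SIZE:
            f.writelines(json.dumps(record) + '\n' for record in batch)
            batch = []
    if batch:
        f.writelines(json.dumps(record) + '\n' for record in batch)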
Importing the index
You can import records from your data file into a new index with the dashboard or by using the saveObjects method.
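For example, with the Python client you could read the exported file back and push its records with save_objects. This is a minimal sketch; the file name and target index name are placeholders.

import json
from algoliasearch.search_client import SearchClient

client = SearchClient.create('YourApplicationID', 'YourAdminAPIKey')
index = client.init_index('your_new_index_name')

# Records exported with browse already contain their objectID,
# so they can be saved as-is.
with open('your_filename') as f:
    records = json.load(f)

index.save_objects(records)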
You can also import an exported index configuration file from Indices > Browse > Manage index > Import Configuration.
Exporting and importing index configuration data
You can export various index configuration options into a file. This file contains that index’s values for:
- Settings
- Synonyms
- Rules
To export the configuration file, select an index from the Indices section, click the Manage index button, and choose Export configuration. You can decide to export one or more of Settings, Synonyms, or Rules. The configuration file is in JSON format.
Exporting configuration data using the API
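For example, with the Python client you could fetch an index’s settings, synonyms, and rules and write them to a single file. This is a minimal sketch: it assumes the get_settings, browse_synonyms, and browse_rules methods of your client version, and the output file name is a placeholder.

import json
from algoliasearch.search_client import SearchClient

client = SearchClient.create('YourApplicationID', 'YourAdminAPIKey')
index = client.init_index('your_index_name')

config = {
    'settings': index.get_settings(),
    'synonyms': list(index.browse_synonyms()),  # iterate over all synonyms
    'rules': list(index.browse_rules()),        # iterate over all rules
}

with open('your_index_config.json', 'w') as f:
    json.dump(config, f)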
Importing index configuration data from the dashboard
To import index configuration settings from an exported JSON file:
- Select an index from the Indices section, click the Manage index button, and choose Import configuration.
- Upload your file.
- Choose how you want to import your data by clicking one or more of Settings, Synonyms, or Rules. For Synonyms and Rules, you can choose whether the existing values are overwritten (cleared and replaced) or updated.
- Type IMPORT and then click Import Configuration to start the process.
Importing configuration data using the API
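For example, with the Python client you could read such a file back and apply each part with set_settings, save_synonyms, and save_rules. This is a minimal sketch; the file name is a placeholder, and the replaceExistingSynonyms and clearExistingRules options are shown to mirror the dashboard’s overwrite behavior.

import json
from algoliasearch.search_client import SearchClient

client = SearchClient.create('YourApplicationID', 'YourAdminAPIKey')
index = client.init_index('your_index_name')

with open('your_index_config.json') as f:
    config = json.load(f)

index.set_settings(config['settings'])

# Overwrite existing synonyms and rules; omit these options to update instead.
index.save_synonyms(config['synonyms'], {'replaceExistingSynonyms': True})
index.save_rules(config['rules'], {'clearExistingRules': True})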