-
Notifications
You must be signed in to change notification settings - Fork 11
/
Copy path: CreatesBatchPredition.cs
40 lines (37 loc) · 1.59 KB
/
CreatesBatchPredition.cs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
using BigML;
using System;
using System.Threading.Tasks;
namespace Demo
{
    /// <summary>
    /// Creates a batch prediction using a dataset and an ensemble stored
    /// in BigML, waits for it to finish, and prints the id of the output
    /// dataset that BigML generated.
    ///
    /// See complete BatchPredictions documentation at
    /// https://bigml.com/api/batch_predictions
    /// </summary>
    class CreatesBatchPrediction
    {
        // Entry point returns Task rather than void: an "async void" Main
        // lets the process exit before the awaited work completes and makes
        // any exception unobservable. "async Task Main" is supported since
        // C# 7.1 and keeps the process alive until the awaits finish.
        static async Task Main()
        {
            // New BigML client in production mode with username and API key
            Console.Write("user: "); var User = Console.ReadLine();
            Console.Write("key: "); var ApiKey = Console.ReadLine();
            var client = new Client(User, ApiKey);

            // change the id to your model, ensemble or logisticregression
            string modelId = "ensemble/54ad6d0558a27e2ddf000XXX";
            string datasetId = "dataset/54ad6d0558a27e2ddf000YYY";

            var parameters = new BatchPrediction.Arguments();
            // "model" parameter can be a model, an ensemble or a logisticregression
            parameters.Add("model", modelId);
            parameters.Add("dataset", datasetId);
            // Also materialise the predictions as a new BigML dataset,
            // exposed afterwards through OutputDatasetResource.
            parameters.Add("output_dataset", true);

            BatchPrediction batchPrediction = await client.CreateBatchPrediction(parameters);
            string batchPredictionId = batchPrediction.Resource;

            // Poll the resource until it reaches a terminal state
            // (success or failure), re-fetching it on each iteration.
            while ((batchPrediction = await client.Get<BatchPrediction>(batchPredictionId)).StatusMessage.NotSuccessOrFail())
            {
                await Task.Delay(10);
            }

            Console.WriteLine(batchPrediction.OutputDatasetResource);
        }
    }
}