Commit 18e5935c authored by Travis CI

Deploy to GitHub Pages: 0e2acb8b

Parent c99a8742
@@ -74,14 +74,25 @@ typedef enum {
 typedef struct {
   char* name;
   paddle_element_type element_type;
-  void* content;
+  unsigned char* content;
   int content_len;
 } paddle_parameter, paddle_gradient;
 
-typedef struct paddle_pserver_client paddle_pserver_client;
+typedef int paddle_pserver_client;
 
-paddle_pserver_client* paddle_new_pserver_client();
-void paddle_pserver_client_release(paddle_pserver_client* client);
+/**
+ * @brief creates a pserver client that talks to etcd for coordination.
+ */
+paddle_pserver_client paddle_new_etcd_pserver_client(char* etcd_addr);
+
+/**
+ * @brief creates a pserver client given pserver addresses.
+ *
+ * @param pserver_addrs comma-separated pserver addresses.
+ * @param selected if current pserver client is selected to initialize all parameter servers.
+ */
+paddle_pserver_client paddle_new_pserver_client(char* pserver_addrs, int selected);
+void paddle_pserver_client_release(paddle_pserver_client c);
 
 /**
  * @brief paddle_begin_init_params begins to initialize parameters on
@@ -95,7 +106,7 @@ void paddle_pserver_client_release(paddle_pserver_client* client);
  * @return 1 if the trainer is selected to initialize parameter
  * servers, otherwise 0.
  */
-int paddle_begin_init_params(paddle_pserver_client* client);
+int paddle_begin_init_params(paddle_pserver_client client);
 
 /**
  * @brief paddle_init_param initializes the parameter on parameter
@@ -109,7 +120,7 @@ int paddle_begin_init_params(paddle_pserver_client* client);
  * @paddle_begin_init_param). Or simply exit the program and wait for
  * the cluster management system to restart the trainer.
  */
-int paddle_init_param(paddle_pserver_client* client, paddle_parameter param, const unsigned char* param_config_proto, int config_len);
+int paddle_init_param(paddle_pserver_client client, paddle_parameter param, const unsigned char* param_config_proto, int config_len);
 
 /**
  * @brief paddle_finish_init_params tells parameter servers client has
@@ -120,7 +131,7 @@ int paddle_init_param(paddle_pserver_client* client, paddle_parameter param, con
  * @paddle_begin_init_param). Or simply exit the program and wait for
  * the cluster management system to restart the trainer.
  */
-int paddle_finish_init_params(paddle_pserver_client* client);
+int paddle_finish_init_params(paddle_pserver_client client);
 
 /**
  * @brief paddle_send_grads sends gradients to parameter servers for
@@ -131,7 +142,7 @@ int paddle_finish_init_params(paddle_pserver_client* client);
  * @param learning_rate the learning rate for the gradients.
  * @return 0 if successful, otherwise -1.
  */
-int paddle_send_grads(paddle_pserver_client* client, const paddle_gradient* grads, int len);
+int paddle_send_grads(paddle_pserver_client client, const paddle_gradient* grads, int len);
 
 /**
  * @brief paddle_get_params gets parameters from parameter servers.
@@ -139,13 +150,15 @@ int paddle_send_grads(paddle_pserver_client* client, const paddle_gradient* grad
  * paddle_get_params will block until parameters are initialized on
  * the parameter servers.
  *
- * @param names the array of names of the parameters to get.
- * @param dst the destination array of parameters to save to.
+ * @param dst the destination array of parameter pointers to save to.
+ * The parameter pointer must be pre-popullated with required parameter name,
+ * and the content of parameter must be pre-allocated of the size of required
+ * parameter on pserver.
  * @param len the length of the names array and the paddle_parameter
  * array.
  * @return 0 if successful, otherwise -1.
  */
-int paddle_get_params(paddle_pserver_client* client, const char** names, paddle_parameter* dst, int len);
+int paddle_get_params(paddle_pserver_client client, paddle_parameter** dst, int len);
 
 /**
  * @brief paddle_save_model indicates parameters to save the parameter
@@ -154,5 +167,5 @@ int paddle_get_params(paddle_pserver_client* client, const char** names, paddle_
  * @param path the path to save parameters.
  * @return 0 if successful, otherwise -1.
  */
-int paddle_save_model(paddle_pserver_client* client, const char* path);
+int paddle_save_model(paddle_pserver_client client, const char* path);
 ```
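Taken together, the changes above turn the client handle into a plain int that is passed by value (likely so an implementation in another language can hand out opaque handles instead of raw pointers), add an etcd-based constructor alongside the address-list constructor, and make paddle_get_params take pre-populated paddle_parameter pointers rather than a separate names array. The sketch below shows how a trainer might drive the updated API end to end; it is illustrative only, and the header name pserver_client.h, the enum constant PADDLE_ELEMENT_TYPE_FLOAT32, the parameter name "w-0", the addresses, buffer sizes, and save path are assumptions, not part of this commit.

```c
/*
 * Minimal trainer-side sketch against the API declared above.
 * Assumptions (not from this commit): the header name, the element-type
 * constant, the parameter name "w-0", the addresses, sizes, and save path.
 */
#include <stdio.h>
#include "pserver_client.h" /* assumed header carrying the declarations above */

int main(void) {
  char addrs[] = "127.0.0.1:3000,127.0.0.1:3001"; /* comma-separated pserver addresses */
  /* The client is now an int handle, passed by value everywhere. */
  paddle_pserver_client client = paddle_new_pserver_client(addrs, /*selected=*/1);

  /* Only the trainer selected by paddle_begin_init_params pushes initial values. */
  if (paddle_begin_init_params(client)) {
    static unsigned char value[4096]; /* illustrative initial weights */
    char init_name[] = "w-0";
    paddle_parameter param;
    param.name = init_name;
    param.element_type = PADDLE_ELEMENT_TYPE_FLOAT32; /* assumed enum constant */
    param.content = value;
    param.content_len = sizeof(value);
    if (paddle_init_param(client, param, NULL, 0) != 0 ||
        paddle_finish_init_params(client) != 0) {
      fprintf(stderr, "init failed; exit and let the cluster restart the trainer\n");
      return 1;
    }
  }

  /* ... compute gradients, then: paddle_send_grads(client, grads, n); ... */

  /* paddle_get_params now takes pre-populated parameter pointers: the caller
   * sets each name and pre-allocates content of the size held on the pserver. */
  static unsigned char buf[4096];
  char get_name[] = "w-0";
  paddle_parameter recv;
  recv.name = get_name;
  recv.element_type = PADDLE_ELEMENT_TYPE_FLOAT32; /* assumed enum constant */
  recv.content = buf;
  recv.content_len = sizeof(buf);
  paddle_parameter* dst[1] = { &recv };
  if (paddle_get_params(client, dst, 1) != 0) {
    fprintf(stderr, "get_params failed\n");
  }

  paddle_save_model(client, "/tmp/model"); /* illustrative path */
  paddle_pserver_client_release(client);
  return 0;
}
```

Where etcd is available, the client would instead be created with paddle_new_etcd_pserver_client(etcd_addr), which the comment above describes as coordinating through etcd.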