@@ -406,7 +406,9 @@ run_channel_example(15)
406406
407407# To use distributed computing with Julia, use the `Distributed` package:
408408
409- import Distributed
409+ # ```julia
410+ # julia> import Distributed
411+ # ```
410412
411413# Like multi-threading, we need to tell Julia how many processes to add. We can
412414# do this either by starting Julia with the `-p N` command line argument, or by
@@ -432,7 +434,10 @@ import Distributed
432434# with. They are orchestrated by the process with the id `1`. You can check
433435# what process the code is currently running on using `Distributed.myid()`
434436
435- Distributed.myid()
437+ # ```julia
438+ # julia> Distributed.myid()
439+ # 1
440+ # ```
436441
437442# As a general rule, to get maximum performance you should add as many processes
438443# as you have logical cores available.
@@ -465,12 +470,14 @@ Distributed.myid()
465470# To fix the error, we need to use `Distributed.@everywhere`, which evaluates
466471# the code on every process:
467472
468- Distributed.@everywhere begin
469-     function hard_work(i::Int)
470-         sleep(1.0)
471-         return Distributed.myid()
472-     end
473- end
473+ # ```julia
474+ # julia> Distributed.@everywhere begin
475+ # function hard_work(i::Int)
476+ # sleep(1.0)
477+ # return Distributed.myid()
478+ # end
479+ # end
480+ # ```
474481
475482# Now if we run `pmap`, we see that it took only 1 second instead of 4, and that
476483# it executed on each of the worker processes:
@@ -495,25 +502,24 @@ end
495502# processes using `Distributed.@everywhere`, and then write a function which
496503# creates a new instance of the model on every evaluation:
497504
498- Distributed.@everywhere begin
499-     using JuMP
500-     import HiGHS
501- end
502-
503- Distributed.@everywhere begin
504-     function solve_model_with_right_hand_side(i)
505-         model = Model(HiGHS.Optimizer)
506-         set_silent(model)
507-         @variable(model, x)
508-         @objective(model, Min, x)
509-         set_lower_bound(x, i)
510-         optimize!(model)
511-         assert_is_solved_and_feasible(sudoku)
512-         return objective_value(model)
513-     end
514- end
515-
516505# ```julia
506+ # julia> Distributed.@everywhere begin
507+ # using JuMP
508+ # import HiGHS
509+ # end
510+ #
511+ # julia> Distributed.@everywhere begin
512+ # function solve_model_with_right_hand_side(i)
513+ # model = Model(HiGHS.Optimizer)
514+ # set_silent(model)
515+ # @variable(model, x)
516+ # @objective(model, Min, x)
517+ # set_lower_bound(x, i)
518+ # optimize!(model)
519+ #            assert_is_solved_and_feasible(model)
520+ # return objective_value(model)
521+ # end
522+ # end
517523# julia> solutions = Distributed.pmap(solve_model_with_right_hand_side, 1:10)
518524# 10-element Vector{Float64}:
519525# 1.0
0 commit comments