# error in running

Hi everyone
I run polyhedra particles in this computer:

CPU (Cores):16
2.3 GHz
Hard disc (HDD): 80 GB
Ram: 32 GB
ubuntu : 16.04

number of bodies : 1511

After 4,780,000 iterations, the run stopped with this error:

"terminate called after throwing an instance of 'CGAL::Assertion_exception'
what(): CGAL ERROR: assertion violation!
Expr: dexp != 2047
File: /usr/include/CGAL/Mpzf.h
Line: 433
Explanation: Creating an Mpzf from infinity or NaN.
Aborted (core dumped)"

what is the problem?

Thanks,
Mahdeyeh

Here is my code:

import os
import math

import matplotlib; matplotlib.rc('axes',grid=True)
import numpy as np
import pylab
from matplotlib import pyplot
from numpy import *
from yade import export as expt
from yade import polyhedra_utils

# Input data:

def _read_polygon_vertices(filename, z_lo, z_hi):
    """Read 2D polygon vertices from a text file and extrude them to 3D.

    The file (exported from a DXF-like source) contains comma-separated
    "x,y" vertex lines, with the literal marker '*LWPOLYLINE' separating
    consecutive polygons.  Every 2D vertex is duplicated at two z levels
    (z_lo and z_hi) so each polygon becomes a thin 3D prism.

    Returns a list of tuples; each tuple holds the (x, y, z) vertex tuples
    of one polygon.

    NOTE(review): the original three copy-pasted loops had their indentation
    stripped by the forum paste; this helper reconstructs the intended logic
    once instead of three times.
    """
    raw = np.genfromtxt(filename, names=True, dtype=None)
    polygons = []
    vertices = ()
    for row in raw:
        if row == '*LWPOLYLINE':
            # Separator: close the current polygon and start a new one.
            polygons.append(vertices)
            vertices = ()
            continue
        # NOTE(review): np.fromstring is deprecated for text parsing; kept as
        # in the original -- with dtype=None on Python 3, rows may be bytes,
        # TODO confirm against the actual input files.
        coords = tuple(np.fromstring(row, sep=',').tolist())
        lower = coords + (z_lo,)  # add z coordinate (bottom face)
        upper = coords + (z_hi,)  # add z coordinate (top face)
        if lower not in vertices:  # skip duplicate vertices
            vertices = vertices + (lower, upper)
    polygons.append(vertices)  # last polygon has no trailing separator
    return polygons

# ListVer is the list of all the vertices of the ring's polygons
ListVer = _read_polygon_vertices('ring.txt', 0.1, 0.2)
# ListVer1 is the list of all the vertices of the boundary's polygons
ListVer1 = _read_polygon_vertices('boundary.txt', -2, 2)
# ListVer2 is the list of all the vertices of the caved waste rock's polygons
ListVer2 = _read_polygon_vertices('waste.txt', 0.1, 0.2)

# Materials type:

# Ore material: dolomite (stiff carbonate rock).
Dolomite = PolyhedraMat()
Dolomite.density, Dolomite.young, Dolomite.poisson = 2870, 24.36e9, 0.2  # kg/m^3, Pa, -

# Waste material: shale (much softer than the ore).
Shale = PolyhedraMat()
Shale.density, Shale.young, Shale.poisson = 2750, 6e9, 0.23  # kg/m^3, Pa, -

# NOTE(review): the bodies of these three loops were lost when the script was
# pasted to the forum (indentation stripped) -- as written this is a syntax
# error.  Presumably each loop created polyhedra bodies from the ring,
# boundary and waste vertex lists (e.g. via polyhedra_utils) and assigned the
# materials/colors used later -- TODO recover the original loop bodies.
for ii in ListVer:

for iii in ListVer1:

for iiii in ListVer2:

# Remove the wall body that supported the ring while the model was built.
O.bodies.erase(340) # delete wall under ring: id: 340

# block z translation and block x, y, z rotations, making the simulation
# effectively two-dimensional in the x-y plane.
# BUG FIX: the original ran this identical loop twice; once is enough.
for b in O.bodies:
    b.state.blockedDOFs = 'zXY'

# function for calming particles
def calm():
    """Zero the linear and angular velocity of every body.

    Used during the first steps to dissipate the energy produced by
    initial particle overlaps.
    """
    for c in O.bodies:
        c.state.vel = Vector3(0, 0, 0)
        c.state.angVel = Vector3(0, 0, 0)

# Reports a value useful for evaluating the stability of the packing.  It is
# defined as (mean force on particles)/(mean contact force), so it tends to 0
# in a stable packing.
def checkUnbalanced():
    """Print the unbalanced-force ratio and append it to a log file."""
    # Evaluate once; the original called utils.unbalancedForce() twice.
    ub = utils.unbalancedForce()
    print("iter %d , unbalanced forces %f" % (O.iter, ub))
    with open("Unbalanced iter Unbalanced forces.txt", "a") as out:
        out.write(repr(O.iter) + ' ' + repr(ub) + ' ' + "\n")

# Engines:

# Standard YADE simulation loop for polyhedra: reset forces, broad-phase
# collision detection, narrow-phase interaction handling, then integration
# with gravity along -y and numerical damping 0.2.
O.engines=[
ForceResetter(),
InsertionSortCollider([Bo1_Polyhedra_Aabb(),]), # We can set collider's verletDist to a fraction of the polyhedra minimum size, since it determines how much is each body enlarged to avoid collision detection at every step.
InteractionLoop(
[Ig2_Polyhedra_Polyhedra_PolyhedraGeom(),], # collision geometry
[Ip2_PolyhedraMat_PolyhedraMat_PolyhedraPhys()], # collision "physics"
[Law2_PolyhedraGeom_PolyhedraPhys_Volumetric()] # contact law -- apply forces
),
NewtonIntegrator(gravity=(0,-9.81,0),damping=0.2),
]

# The model has to calm first, because there are some overlaps in particles.
utils.calm()

# Keep calming periodically -- only needed during the first few steps.
O.engines += [PyRunner(iterPeriod=20, command='calm()', label="calmRunner")]
# Log the unbalanced-force ratio every 10000 cycles.
# (The original comment said "1000 cycles", contradicting iterPeriod=10000.)
O.engines += [PyRunner(command='checkUnbalanced()', iterPeriod=10000, label="checker")]

O.dt = 10e-6  # fixed timestep [s]

O.saveTmp('model1')
O.save('model1.bz2')

# first run of model
O.run(50000, True)

O.save('model2.bz2')

# Outputs:

clrOre = [0.1, 0.5, 0.2]      # RGB color tagging ore bodies
clrWaste = [0.9, 0.81, 0.45]  # RGB color tagging waste bodies

def positions():
    """Append (time, x, y, z) of every ore-colored body to a log file.

    BUG FIX: the original wrote repr(b.state.pos) three times instead of the
    individual x, y, z components, so the file never contained separate
    coordinates.
    """
    with open("Positions time x y z.txt", "a") as out:
        for b in O.bodies:
            # NOTE(review): b.shape.color is a Vector3 in yade; comparing it
            # to a plain list mirrors the original code -- confirm it matches.
            if b.shape.color == clrOre:
                p = b.state.pos
                out.write(repr(O.time) + ' ' + repr(p[0]) + ' ' + repr(p[1]) + ' ' + repr(p[2]) + ' ' "\n")

def oreAmount():
    """Return the total mass [kg] of all ore-colored bodies."""
    clrOre = [0.1, 0.5, 0.2]  # same value as the module-level clrOre
    return sum(b.state.mass for b in O.bodies if b.shape.color == clrOre)

# Separate directory for all VTK files of this run (external files kept apart
# because of the large simulation).
os.mkdir(O.tags['id'])
# Export a snapshot every 20000 cycles so the model can be viewed in Paraview.
# BUG FIX: the original label was "VTKview", the same as the function below;
# yade exposes labelled engines as module-level variables, so the label and
# the function clobbered each other -- renamed the label to "vtkRunner".
O.engines = O.engines + [PyRunner(iterPeriod=20000, command='VTKview()', label="vtkRunner")]
# Exporter that writes animation/images of the simulation in VTK format.
PolVtkData = expt.VTKExporter(O.tags['id'] + '/' + 'polData')

def VTKview():
    """Dump the current polyhedra geometry via the VTK exporter."""
    PolVtkData.exportPolyhedra()

# Seed masses [kg] for extracted ore / waste; small non-zero values keep the
# ratios computed in excavation() finite.
m_o = 0.00005
m_w = 0.00005

# Total ore recovery [%] (recomputed in excavation()).
R = 0

# Ore dilution and metal dilution [%] (recomputed in excavation()).
D = 0
D_m = 0

E = 0 # E: extraction [%]

# Total ore mass in the ring [kg].
M_o = oreAmount()

totalEMass = 1.25 * M_o # Convergence criteria for the models are set as 125% extraction of the ring mass.

def excavation():
    """Tally extracted ore/waste mass and log extraction statistics.

    Sums the mass of ore- and waste-colored bodies inside the draw-point
    window, then computes extraction E, ore dilution D, metal dilution D_m
    and total recovery R (all in %) and appends them to two log files.
    """
    m_o = 0.00005  # seed values [kg] keep the ratios below finite
    m_w = 0.00005
    for b in O.bodies:
        p = b.state.pos
        # BUG FIX: the original compared the whole Vector3
        # (b.state.pos > (-2.4) and b.state.pos < 1 ...), which is not a
        # per-component test.  Assumed extraction window: x in (-2.4, 1)
        # and y in (0, 1) -- TODO confirm the intended axes/bounds.
        if -2.4 < p[0] < 1 and 0 < p[1] < 1:
            if b.shape.color == clrOre:
                m_o += b.state.mass  # total ore mass extracted
                print('ore')
            if b.shape.color == clrWaste:
                m_w += b.state.mass  # total waste mass
                print('waste')

    print('m_o %s m_w %s' % (m_o, m_w))
    with open("Mass iter massore masswaste.txt", "a") as mass_log:
        mass_log.write(repr(O.iter) + ' ' + repr(m_o) + ' ' + repr(m_w) + ' ' "\n")

    E = (m_o + m_w) / M_o * 100            # extraction definition [%]
    D = (m_w / (m_w + m_o)) * 100          # ore dilution [%]
    D_m = (1 - (m_o / (m_w + m_o))) * 100  # metal dilution [%]
    R = (m_o / M_o) * 100                  # total ore recovery [%]
    print('E %s D %s D_m %s R %s' % (E, D, D_m, R))
    with open("DataAnalysis iter E D DM R.txt", "a") as data_log:
        data_log.write(repr(O.iter) + ' ' + repr(E) + ' ' + repr(D) + ' ' + repr(D_m) + ' ' + repr(R) + "\n")

# Extra Engines:

# Log ore positions every 0.5 s of real (wall-clock) time.
# (The original comment said "every 60 seconds", contradicting realPeriod=0.5.)
O.engines = O.engines + [PyRunner(command='positions()', realPeriod=0.5, label="flow")]

# Evaluate extraction statistics every 200 cycles.
O.engines = O.engines + [PyRunner(iterPeriod=200, command='excavation()', label="excavate")]

polyhedra_utils.SizeRatio()
# BUG FIX: the export module was imported as `expt`; the original called the
# undefined name `export`.
expt.textPolyhedra('poly.txt')

## Question information

Language:
English Edit question
Status:
Solved
For:
Assignee:
No assignee Edit question
Solved by:
Jan Stránský
Solved:
2019-12-15
Last query:
2019-12-15
2019-12-15
 Jan Stránský (honzik) said on 2019-12-15: #1

Hello,
would it be possible to check with a newer CGAL version? Some problems are solved this way (see e.g. the linked bug report; the link was lost in this copy).
Disclaimer: no idea if it helps for this specific case..
cheers
Jan

 Mahdeyeh (mahdiye.sky) said on 2019-12-15: #2

I installed the CGAL version that was available on Ubuntu 16.04:
libcgal-dev:
Installed: 4.7-4
Candidate: 4.7-4
Version table:
*** 4.7-4 500

Then I ran the model again... but the simulation stopped again:

"terminate called after throwing an instance of 'CGAL::Assertion_exception'
what(): CGAL ERROR: assertion violation!
Expr: dexp != 2047
File: /usr/include/CGAL/Mpzf.h
Line: 433
Explanation: Creating an Mpzf from infinity or NaN.
Aborted (core dumped)"

regards,
Mahdeyeh

 Jan Stránský (honzik) said on 2019-12-15: #3

I suggest to try on a newer system, yade and cgal (could be a system in virtualbox and yadedaily, should not take much time to install and also there is no risk related to system upgrade)
But again, with no guarantee to succeed..
cheers
Jan

 Mahdeyeh (mahdiye.sky) said on 2019-12-15: #4

Thanks Jan

I will do this and give it a try.

Regards,
Mahdeyeh