@article{ author = {ShakhsiDastgahian, M. and KhoshbinGhomash, H.}, title = {Subspace-Based Approaches for Hybrid Millimeter-Wave Channel Estimation}, abstract = {Millimeter wave communication (mmWC) is a promising candidate for 5G communication systems with high data rates. To overcome the channel propagation characteristics in this frequency band, high-dimensional antenna arrays need to be deployed at the transceivers. Such a deployment prevents the use of an ADC or RF chain in each branch of the MIMO system because of power constraints. Thus, such systems must employ a hybrid analog/digital precoding/combining architecture. Hence, channel estimation needs to be revisited. This paper proposes new algorithms to estimate the mmW channel by exploiting the sparse nature of the channel and finding the subspace of the received signal vectors based on MUSIC. By combining the multiple measurement vector (MMV) concept, MUSIC, subspace augmentation (SA) and two-stage orthogonal subspace matching pursuit (TOSMP) approaches, we try to recover the indices of the non-zero elements of an unknown channel matrix accurately, even under the rank-deficient condition. These indices are referred to as the support in this context. Simulation results indicate that the MUSIC-based approaches offer lower estimation error and higher sum rates compared with conventional MMV solutions.}, Keywords = {Millimeter wave MIMO systems, sparse channel estimation, support, multiple measurement vectors (MMV), subspace augmentation (SA)}, volume = {9}, Number = {4}, pages = {1-10}, publisher = {ICT Research Institute (ITRC)}, title_fa = {}, abstract_fa = {}, keywords_fa = {}, url = {http://ijict.itrc.ac.ir/article-1-22-en.html}, eprint = {http://ijict.itrc.ac.ir/article-1-22-en.pdf}, journal = {International Journal of Information and Communication Technology Research}, issn = {2251-6107}, eissn = {2783-4425}, year = {2017} }
@article{ author = {Ebrahimkhani, Atena and Akhbari, Bahareh and Seyfe, Babak}, title = {A Fairness-Guaranteed Game-Theoretic Perspective in Multi-User Interference Channel}, abstract = {In this paper, a novel game-theoretic perspective with a pricing scheme over a multi-user Gaussian interference channel is presented. The Kalai-Smorodinsky bargaining solution (KSBS) is investigated as a measure for guaranteeing fairness in resource allocation among users on the weak Gaussian interference channel. By using the treating-interference-as-noise (TIN) scenario and applying proper prices for the transmit power of each user, the proposed game settles at a unique fair point. Also, an iterative algorithm is proposed that converges to the KSBS when users update their transmit powers and prices.
Numerical results confirm the analytical development.}, Keywords = {Gaussian Interference Channel, Game Theory, Pricing Scheme, Nash Equilibrium, Kalai-Smorodinsky Bargaining Solution, Fairness}, volume = {9}, Number = {4}, pages = {11-18}, publisher = {ICT Research Institute (ITRC)}, title_fa = {}, abstract_fa = {}, keywords_fa = {}, url = {http://ijict.itrc.ac.ir/article-1-23-en.html}, eprint = {http://ijict.itrc.ac.ir/article-1-23-en.pdf}, journal = {International Journal of Information and Communication Technology Research}, issn = {2251-6107}, eissn = {2783-4425}, year = {2017} }
@article{ author = {Banaie, Fatemeh and Hosseini, Seyed Amin and Yaghmaee, Mahamad Hossei}, title = {Cloud-Based Large-Scale Sensor Networks: Motivation, Taxonomies, and Open Challenges}, abstract = {Recently, the integration of ubiquitous wireless sensor networks (WSNs) and powerful cloud computing (CC) has attracted growing attention and effort in both academia and industry. In this new paradigm, cloud computing can be exploited to perform analysis of online as well as offline data streams provided by sensor networks. This can help to deal with the inherent limitations of WSNs in combining and analyzing the large volume of heterogeneous sensory data. The study we present in this paper provides a comprehensive analysis and discussion of representative works on large-scale WSNs, the need for integrating sensors with the cloud, the main challenges deriving from such integration, and future research directions in this promising field.}, Keywords = {Cloud computing, integration, heterogeneous, Sensor-Cloud infrastructure}, volume = {9}, Number = {4}, pages = {19-28}, publisher = {ICT Research Institute (ITRC)}, title_fa = {}, abstract_fa = {}, keywords_fa = {}, url = {http://ijict.itrc.ac.ir/article-1-24-en.html}, eprint = {http://ijict.itrc.ac.ir/article-1-24-en.pdf}, journal = {International Journal of Information and Communication Technology Research}, issn = {2251-6107}, eissn = {2783-4425}, year = {2017} }
@article{ author = {Hartoonian, Armond and Khademzadeh, Ahm}, title = {Hub Location Allocation Problem in Computer Networks Using Intelligent Optimization Algorithms}, abstract = {One of the new issues that has been raised in recent years is the hub network design problem. Hubs are collection and distribution centers that are used to reduce the number of connections and to favor indirect over direct communication. They are interface facilities that serve as switch centers to collect and distribute flows in the network. They determine routes and organize traffic between sources and destinations in order to provide high performance at lower cost. In the hub location problem, the aim is to find suitable locations for the hubs and routes for sending information from a source to a destination, in order to reduce costs and achieve the desired objective through multiple transfers between the hubs. In this paper, teaching-learning-based optimization, particle swarm optimization and the imperialist competitive algorithm were studied for optimally locating hubs and allocating nodes to the nearest located hub nodes.
Experimental results show that hub location using the teaching-learning-based optimization (TLBO) algorithm is performed successfully with high accuracy and precision.}, Keywords = {Hub Location-allocation, Network, Optimization algorithm, TLBO}, volume = {9}, Number = {4}, pages = {29-36}, publisher = {ICT Research Institute (ITRC)}, title_fa = {}, abstract_fa = {}, keywords_fa = {}, url = {http://ijict.itrc.ac.ir/article-1-25-en.html}, eprint = {http://ijict.itrc.ac.ir/article-1-25-en.pdf}, journal = {International Journal of Information and Communication Technology Research}, issn = {2251-6107}, eissn = {2783-4425}, year = {2017} }
@article{ author = {Keyvanpour, MohammadReza and Kholghi, Mahnoosh and Haghani, Sogol}, title = {Hybrid of Active Learning and Dynamic Self-Training for Data Stream Classification}, abstract = {Most data stream classification methods need plenty of labeled samples to achieve a reasonable result. However, in a real data stream environment, it is crucial and expensive to obtain labeled samples, unlike unlabeled ones. Although active learning is one way to tackle this challenge, it ignores the unlabeled instances, whose utilization can help strengthen supervised learning. This paper proposes a hybrid framework named “DSeSAL”, which combines active learning and dynamic self-training to achieve both strengths. The framework also introduces variance-based self-training, which uses minimal variance as a confidence measure. Since an early mistake by the base classifier in self-training can reinforce itself by generating incorrectly labeled data, especially in the multi-class setting, a dynamic approach is adopted to avoid deterioration of classifier accuracy. Another capability of the proposed framework is controlling the accuracy reduction by specifying a tolerance measure. To overcome data stream challenges, i.e., infinite length and evolving nature, we use the chunking method along with a classifier ensemble. A classifier is trained on each chunk and, together with previous classifiers, forms an ensemble of M such classifiers. Experimental results on synthetic and real-world data demonstrate the performance of the proposed framework in comparison with other approaches.}, Keywords = {Computer Science, Data Mining, Semi-supervised learning, Classification, Data Stream}, volume = {9}, Number = {4}, pages = {37-49}, publisher = {ICT Research Institute (ITRC)}, title_fa = {}, abstract_fa = {}, keywords_fa = {}, url = {http://ijict.itrc.ac.ir/article-1-26-en.html}, eprint = {http://ijict.itrc.ac.ir/article-1-26-en.pdf}, journal = {International Journal of Information and Communication Technology Research}, issn = {2251-6107}, eissn = {2783-4425}, year = {2017} }
@article{ author = {Karamizadeh, Sasan and Arabsorkhi, Abouzar}, title = {Enhancement of Illumination Scheme for Adult Image Recognition}, abstract = {Biometric-based techniques have emerged as the most promising option for individual recognition. This task is still a challenge for computer vision systems. Several approaches to adult image recognition, including deep neural networks and traditional classifiers, have been proposed. Different image condition factors such as expression, occlusion, pose, and illumination affect facial recognition systems. A reasonable amount of illumination variation between the gallery and probe images needs to be taken into account in adult image recognition algorithms.
In the context of adult image verification, illumination variation plays a vital role, and this factor will most likely result in misclassification. Different architectures and different parameters have been tested in order to improve the classification accuracy. The proposed method consists of four steps, beginning with fuzzy deep neural network segmentation. This step is employed in order to segment an image based on illumination intensity. Histogram truncation and stretching is utilized in the second step to improve the histogram distribution in the segmented area. The third step is Contrast Limited Adaptive Histogram Equalization (CLAHE), which is used to enhance the contrast of the segmented area. Finally, DCT-II is applied and low-frequency coefficients are selected in a zigzag pattern for illumination normalization. In the proposed method, the AlexNet architecture is used, which consists of 5 convolutional layers, max-pooling layers, and fully connected layers. The image is passed through a stack of convolutional layers after the fuzzy neural representation, where an 8 × 8 filter is used. The convolutional stride is fixed to 1 pixel. After every convolution, there is a subsampling layer, which consists of a 2 × 2 kernel to perform max pooling. This helps to reduce the training time and computational complexity of the network. The proposed scheme is analyzed and its accuracy and effectiveness are evaluated. In this research, we used 80,400 images, which are drawn from two datasets, the Compaq and Poesia datasets, as well as images found on the Internet.}, Keywords = {adult image, illumination, fuzzy deep neural network segmentation, Histogram truncation and stretching, DCTII, AlexNet, Convolutional}, volume = {9}, Number = {4}, pages = {50-56}, publisher = {ICT Research Institute (ITRC)}, title_fa = {}, abstract_fa = {}, keywords_fa = {}, url = {http://ijict.itrc.ac.ir/article-1-27-en.html}, eprint = {http://ijict.itrc.ac.ir/article-1-27-en.pdf}, journal = {International Journal of Information and Communication Technology Research}, issn = {2251-6107}, eissn = {2783-4425}, year = {2017} }
@article{ author = {Keramati, Marj}, title = {Dynamic Risk Assessment System for the Vulnerability Scoring}, abstract = {Software vulnerabilities are one of the key factors that endanger network security, so the increasing rate of vulnerability emergence is a critical challenge in security management. In addition, organizations constantly face limited budgets. Therefore, to perform network hardening in a cost-effective manner, quantitative vulnerability assessment for finding the most critical vulnerabilities is a vital issue. The most prominent vulnerability scoring system is CVSS (Common Vulnerability Scoring System), which ranks vulnerabilities based on their intrinsic characteristics. However, CVSS ignores temporal features, i.e., the effect of existing patches and exploit tools, in estimating the risk of vulnerabilities, so CVSS scores are not accurate. Another deficiency of CVSS that limits its application in real networks is that only a small set of scores is used to discriminate among a large number of vulnerabilities. To address these difficulties with existing scoring systems, some security metrics are defined here that rank vulnerabilities by considering their temporal features alongside their intrinsic ones.
Also, with the aim of improving score diversity in CVSS, a new method is proposed for estimating the impact of vulnerability exploitation on the security parameters of the network. Performing risk assessment by considering the type of attacker that most endangers network security is another novelty of this paper.}, Keywords = {CVSS, Risk, Vulnerability, Impact, Network Hardening, Security Metric, exploit, patch}, volume = {9}, Number = {4}, pages = {57-68}, publisher = {ICT Research Institute (ITRC)}, title_fa = {}, abstract_fa = {}, keywords_fa = {}, url = {http://ijict.itrc.ac.ir/article-1-28-en.html}, eprint = {http://ijict.itrc.ac.ir/article-1-28-en.pdf}, journal = {International Journal of Information and Communication Technology Research}, issn = {2251-6107}, eissn = {2783-4425}, year = {2017} }
@article{ author = {Arezoomand, Masoud and KalantariMeybodi, Mohsen and Noori, Narges}, title = {Design and Implementation of a TEM Cell with Piecewise Linear Tapering}, abstract = {Transverse electromagnetic (TEM) cells are used for electromagnetic interference/compatibility (EMI/EMC) experiments on small RF devices in a laboratory environment. The standard electromagnetic fields generated in the shielded environment of a TEM cell can also be used for calibration of RF field probes. In this paper, a 50 Ω open TEM cell is designed and fabricated to generate standard electromagnetic fields from 1 MHz up to 1 GHz. To overcome the impedance mismatch and improve the voltage standing wave ratio (VSWR) along the TEM cell, a new piecewise linear tapering method is proposed for the inner conductor of the cell. The resulting matching conditions of the new tapering method are presented through numerical simulations and measurements and compared to those of the conventional simple linear tapering.}, Keywords = {TEM Cell design, EMI/EMC experiments, calibration of RF }, volume = {9}, Number = {3}, pages = {1-6}, publisher = {ICT Research Institute (ITRC)}, title_fa = {}, abstract_fa = {}, keywords_fa = {}, url = {http://ijict.itrc.ac.ir/article-1-29-en.html}, eprint = {http://ijict.itrc.ac.ir/article-1-29-en.pdf}, journal = {International Journal of Information and Communication Technology Research}, issn = {2251-6107}, eissn = {2783-4425}, year = {2017} }
@article{ author = {Keramati, Marj}, title = {An Attack Graph Based Method for Predictive Risk Evaluation of Zero-Day Attacks}, abstract = {Performing risk assessment of computer networks is inevitable in the process of network hardening. For efficient attack prevention, risk evaluation must be done in an accurate and quantitative manner. Such risk assessment requires a thorough understanding of the attack's causes, or vulnerabilities, and their related characteristics. One major problem is that there are vulnerabilities that are known to attackers but for which no information exists in databases like the NVD (National Vulnerability Database). Such vulnerabilities are referred to as unknown or zero-day attacks. Existing standards like NVD ignore the effect of unknown attacks in risk assessment of computer networks. In this paper, by defining some attack-graph-based security metrics, we propose an innovative method for risk evaluation of multi-step zero-day attacks. By predicting the intrinsic features of zero-day attacks, the proposed method makes their risk estimation possible.
Considering the effect of the temporal features of vulnerabilities makes our approach a dynamic risk estimator.}, Keywords = {Zero day attack, CVSS, Vulnerability, Risk Assessment, Security Metric, Network Hardening, Intrusion Prevention}, volume = {9}, Number = {3}, pages = {7-16}, publisher = {ICT Research Institute (ITRC)}, title_fa = {}, abstract_fa = {}, keywords_fa = {}, url = {http://ijict.itrc.ac.ir/article-1-30-en.html}, eprint = {http://ijict.itrc.ac.ir/article-1-30-en.pdf}, journal = {International Journal of Information and Communication Technology Research}, issn = {2251-6107}, eissn = {2783-4425}, year = {2017} }
@article{ author = {salahi, ahmad and Enayatizadeh, Jav}, title = {Domain Ontology to Distinguish Different Types of Rootkits}, abstract = {A rootkit is an auxiliary tool for sniffing, stealing and hiding, so it has become a key component in almost all successful attacks. Analysis of rootkits gives system administrators and security software managers the ability to detect and prevent a computer from being compromised. An ontology provides a detailed conceptualization to represent rootkit concepts and their relationships to other security concepts in the cyber-attack domain. In this paper we present an ontology for rootkits which contains many concepts relating to security, cyber-attacks and operating systems. We divide rootkits according to four attributes and expand the ontology for rootkits accordingly. This ontology can be used to distinguish different types of rootkits.}, Keywords = {Ontology, Rootkit, Malware, Security}, volume = {9}, Number = {3}, pages = {17-24}, publisher = {ICT Research Institute (ITRC)}, title_fa = {}, abstract_fa = {}, keywords_fa = {}, url = {http://ijict.itrc.ac.ir/article-1-32-en.html}, eprint = {http://ijict.itrc.ac.ir/article-1-32-en.pdf}, journal = {International Journal of Information and Communication Technology Research}, issn = {2251-6107}, eissn = {2783-4425}, year = {2017} }
@article{ author = {Abazari, Farzaneh and Analoui, Morteza and Takabi, Hass}, title = {Multi-Objective Response to Co-Resident Attacks in Cloud Environment}, abstract = {Cloud computing is a dynamic environment that offers a variety of on-demand services at low cost. However, customers face new security risks due to the shared infrastructure in the cloud. Co-residency of virtual machines on the same physical machine leads to several threats for cloud tenants. Cloud administrators often face a more challenging problem since they have to work within a fixed budget for cloud hardening. The problem is how to select a subset of countermeasures that stays within the budget and yet minimizes the residual damage to the cloud caused by malicious VMs. We address this problem by introducing a novel multi-objective attack response system. We consider response cost, co-residency threat, and virtual machine interactions to select the optimal response in the face of an attack. Optimal response selection, as a multi-objective optimization problem, calculates alternative responses with minimum threat and cost. Our method estimates the threat level based on the collaboration graph and suggests proper countermeasures based on the threat type with minimum cost.
Experimental results show that our system can suggest optimal responses based on the current state of the cloud.}, Keywords = {Cloud Computing, Attack Response, Cloud Security, Co-resident Attack, Graph Theory}, volume = {9}, Number = {3}, pages = {25-36}, publisher = {ICT Research Institute (ITRC)}, title_fa = {}, abstract_fa = {}, keywords_fa = {}, url = {http://ijict.itrc.ac.ir/article-1-31-en.html}, eprint = {http://ijict.itrc.ac.ir/article-1-31-en.pdf}, journal = {International Journal of Information and Communication Technology Research}, issn = {2251-6107}, eissn = {2783-4425}, year = {2017} }
@article{ author = {Shabihi, Negar and Taghiyareh, Fattaneh and Abdoli, Mohammad Hossei}, title = {Enhancement of Educational Games Based on Personality Type Indicators}, abstract = {Previous studies indicate that the use of adaptive learning and game techniques enhances the learning process; nevertheless, adaptation based on the learner's personality has not been well researched in the serious-games literature. There are plenty of game-elements which can be used in an educational game, but their effect may vary with differences in the personality types of learners. The purpose of this paper is to investigate the impact of various game-elements on learning outcomes. With the aim of improving the learning process, we have focused on designing a game-based learning environment where the game-elements are personalized based on the learner's personality type. The developed game contains a non-adaptive mode and also an in-game adaptive mode that assigns game-elements according to the learner's personality type. The results showed a significant difference between the engagement levels of the two modes; in addition, learning outcomes were better for the adaptive mode. Besides, a sensitivity analysis of the various personality dimensions relative to the game-elements has been performed and the results are presented in a corresponding table. The results of this paper indicate that a well-designed game environment can improve the learning process. This paper presents the prominent and influential elements for each of the personality types, and its results can be used to improve the effect of using game-elements in designing learning environments.}, Keywords = {Game-based learning, Game design, Game-element, Adaptive learning, Personalization, MBTI, Personality}, volume = {9}, Number = {3}, pages = {37-45}, publisher = {ICT Research Institute (ITRC)}, title_fa = {}, abstract_fa = {}, keywords_fa = {}, url = {http://ijict.itrc.ac.ir/article-1-33-en.html}, eprint = {http://ijict.itrc.ac.ir/article-1-33-en.pdf}, journal = {International Journal of Information and Communication Technology Research}, issn = {2251-6107}, eissn = {2783-4425}, year = {2017} }
@article{ author = {Hajary, Hajar and Ahmadi, Ali and Khani, Maryam}, title = {A Novel Approach for Learning Improvement in Interactive Classroom Environments Using Learning Automata}, abstract = {Determining the best way of learning and acquiring knowledge, especially in intelligent tutoring systems, has drawn researchers' attention in recent years. Studies conducted on e-learning systems and strategies proposed to improve the quality of these systems indicate that the main learning resources for students in an educational environment are provided by two crucial factors. The first is the teacher, who can fundamentally influence students' success by demonstrating her ability and skills, and the second is the interaction among students.
In this article, a new modeling approach is presented for improving learning/teaching models as well as the interaction among learners, from which learners can derive the most benefit. The proposed model uses learning automata for modeling the teacher and her behavior in such a way that she can also learn and teach better at the same time, thus improving her teaching skills. The model also uses cellular learning automata in order to model the behavior of the learners as well as the interactions between the learners for knowledge acquisition. The results indicate that, in addition to the teacher's skills, the interaction and communication among learners can significantly improve the quality and speed of learning as compared with previous methods.}, Keywords = {tutorial-like system, interactions, learning automata, cellular learning automata}, volume = {9}, Number = {3}, pages = {47-58}, publisher = {ICT Research Institute (ITRC)}, title_fa = {}, abstract_fa = {}, keywords_fa = {}, url = {http://ijict.itrc.ac.ir/article-1-34-en.html}, eprint = {http://ijict.itrc.ac.ir/article-1-34-en.pdf}, journal = {International Journal of Information and Communication Technology Research}, issn = {2251-6107}, eissn = {2783-4425}, year = {2017} }
@article{ author = {Tarokh, Mohammadjafar and EsmaeiliGookeh, Mahs}, title = {A Stochastic Approach for Valuing Customers}, abstract = {The present study attempts to develop a new model for computing customer lifetime value (CLV). The customer lifetime value defined in this paper is the combination of present value and future value. As an innovation, the CLV modeling of this paper is based on customer behavior modeling performed with data mining techniques. By extracting the profit vector related to each type of customer behavior, the present value is calculated; then, by utilizing a Markov chain model, the future value is predicted and the customer lifetime value is computed. A new churn model is contributed by the authors to manage unprofitable CRM costs; utilizing this churn model, the proposed CLV model can yield more profitability for the enterprise. The new CLV model of this paper was validated with historical customer data from a composite manufacturing company.}, Keywords = {customer, Markov chain model, data mining, future value, present value, customer behavior}, volume = {9}, Number = {3}, pages = {59-66}, publisher = {ICT Research Institute (ITRC)}, title_fa = {}, abstract_fa = {}, keywords_fa = {}, url = {http://ijict.itrc.ac.ir/article-1-35-en.html}, eprint = {http://ijict.itrc.ac.ir/article-1-35-en.pdf}, journal = {International Journal of Information and Communication Technology Research}, issn = {2251-6107}, eissn = {2783-4425}, year = {2017} }
@article{ author = {Vardi, Fatemeh and KhademZadeh, Ahmad and Reshadi, Midi}, title = {ARTEMIS: A Simulator Tool for Heterogeneous Network-on-Chip}, abstract = {Complex homogeneous or heterogeneous networks-on-chip increase the need for simulation tools that allow designers to evaluate and compare network performance. Toward this end, the ARTEMIS tool, a MATLAB-based simulation environment, is developed. This simulator offers a collection of network configurations regarding the topology graph, routing algorithm and switching strategy, including an allocation scheme for a target application. Consequently, designers can choose the number and depth of virtual channels and the capacity of each link by applying an efficient allocation scheme, which is provided by this tool.
Average latency and throughput are the performance evaluation metrics measured with the proposed simulator tool.}, Keywords = {interconnection network, homogeneous NoC, heterogeneous NoC, simulator, performance}, volume = {9}, Number = {2}, pages = {1-9}, publisher = {ICT Research Institute (ITRC)}, title_fa = {}, abstract_fa = {}, keywords_fa = {}, url = {http://ijict.itrc.ac.ir/article-1-36-en.html}, eprint = {http://ijict.itrc.ac.ir/article-1-36-en.pdf}, journal = {International Journal of Information and Communication Technology Research}, issn = {2251-6107}, eissn = {2783-4425}, year = {2017} }
@article{ author = {Salehi, majid and Tadayon, Mohammad Hesam and Daryabar, Fari}, title = {An Attack-Defense Model for the Binder on the Android Kernel Level}, abstract = {In this paper, we seek vulnerabilities in and conduct possible attacks on crucial and essential parts of the Android OS architecture, including the framework and kernel layers. In this regard, we explain the Binder component of the Android OS from a security point of view. Then, we demonstrate how to penetrate the Binder and control the data exchange mechanism in the Android OS by proposing a kernel-level attack model based on the hooking method. In addition, we provide a method to detect these kinds of attacks on the Android framework and kernel layers. As a result, by implementing the attack model, it is illustrated that Android processes are detectable and that data can be extracted from any process and system call. On the other hand, by using our proposed detection method, the possibility of using this attack approach in applications installed on Android smartphones is sharply decreased.}, Keywords = {smartphone security, android security, android penetration testing, binder component, kernel level attack}, volume = {9}, Number = {2}, pages = {11-17}, publisher = {ICT Research Institute (ITRC)}, title_fa = {}, abstract_fa = {}, keywords_fa = {}, url = {http://ijict.itrc.ac.ir/article-1-37-en.html}, eprint = {http://ijict.itrc.ac.ir/article-1-37-en.pdf}, journal = {International Journal of Information and Communication Technology Research}, issn = {2251-6107}, eissn = {2783-4425}, year = {2017} }
@article{ author = {Rahimi, Marziea and Zahedi, Morteza and Mashayekhi, Ho}, title = {A Probabilistic Topic Model based on an Arbitrary-Length Co-occurrence Window}, abstract = {Probabilistic topic models have been very popular in automatic text analysis since their introduction. These models work based on word co-occurrence, but are not very flexible with respect to the context in which co-occurrence is considered. Many probabilistic topic models do not allow local or spatial data to be taken into account. In this paper, we introduce a probabilistic topic model that benefits from an arbitrary-length co-occurrence window and encodes local word dependencies for extracting topics. We assume a multinomial distribution with a Dirichlet prior over the window positions to let the words in every position have a chance to influence topic assignments. In the proposed model, topics, being represented by word pairs, have a more meaningful presentation. The model is applied to a dataset of 2000 documents.
The proposed model produces interesting, meaningful topics and reduces the problem of sparseness.}, Keywords = {probabilistic topic modeling, co-occurrence, context window, Gibbs sampling, generative models}, volume = {9}, Number = {2}, pages = {19-25}, publisher = {ICT Research Institute (ITRC)}, title_fa = {}, abstract_fa = {}, keywords_fa = {}, url = {http://ijict.itrc.ac.ir/article-1-38-en.html}, eprint = {http://ijict.itrc.ac.ir/article-1-38-en.pdf}, journal = {International Journal of Information and Communication Technology Research}, issn = {2251-6107}, eissn = {2783-4425}, year = {2017} }
@article{ author = {Faroughi, Azadeh and Javidan, Rez}, title = {A Novel Density based Clustering Method using Nearest and Farthest Neighbor with PCA}, abstract = {Common nearest-neighbor density estimators usually do not work well for high-dimensional datasets. Moreover, they have a high time complexity of O(n^2) and require high memory usage, especially when indexing is used. In order to overcome these limitations, we propose a new method that calculates distances to the nearest and farthest neighbor nodes to create dataset subgroups. Therefore, the computational time complexity becomes O(n log n) and the space complexity becomes constant. After subgroup formation, an assembling technique is used to derive correct clusters. In order to overcome the problem of high-dimensional datasets, Principal Component Analysis (PCA) is used in the clustering method to preprocess high-dimensional data. Many experiments on synthetic datasets are carried out to demonstrate the feasibility of the proposed method. Furthermore, we compared this algorithm to the similar algorithm DBSCAN on real-world datasets, and the results showed significantly higher accuracy for the proposed method.}, Keywords = {nearest-neighbor density estimator, farthest neighbor, subgroups, principal component analysis (PCA)}, volume = {9}, Number = {2}, pages = {27-34}, publisher = {ICT Research Institute (ITRC)}, title_fa = {}, abstract_fa = {}, keywords_fa = {}, url = {http://ijict.itrc.ac.ir/article-1-39-en.html}, eprint = {http://ijict.itrc.ac.ir/article-1-39-en.pdf}, journal = {International Journal of Information and Communication Technology Research}, issn = {2251-6107}, eissn = {2783-4425}, year = {2017} }
@article{ author = {Mousavi, Zahra and Faili, Heshaam and Fadaee, Marzieh}, title = {Persian Wordnet Construction using Supervised Learning}, abstract = {This paper presents an automated supervised method for Persian wordnet construction. Using a Persian corpus and a bilingual dictionary, initial links between Persian words and Princeton WordNet synsets are generated. These links are later discriminated as correct or incorrect by employing seven features in a trained classification system. The whole method is a classification system trained on a training set containing a pre-existing Persian wordnet, FarsNet, as a set of correct instances. A set of sophisticated distributional and semantic features is proposed for use in the classification system. Furthermore, a set of randomly selected links is added to the training data as incorrect instances. The links classified as correct are collected to be included in the final wordnet. State-of-the-art results are achieved on the automatically derived Persian wordnet.
The resulting wordnet, with a precision of 91.18%, includes more than 16,000 words and 22,000 synsets.}, Keywords = {wordnet, ontology, supervised, Persian language}, volume = {9}, Number = {2}, pages = {35-44}, publisher = {ICT Research Institute (ITRC)}, title_fa = {}, abstract_fa = {}, keywords_fa = {}, url = {http://ijict.itrc.ac.ir/article-1-40-en.html}, eprint = {http://ijict.itrc.ac.ir/article-1-40-en.pdf}, journal = {International Journal of Information and Communication Technology Research}, issn = {2251-6107}, eissn = {2783-4425}, year = {2017} }
@article{ author = {Saghafi, Fatemeh and Heshmati, Zainabolhoda and Heydari, Mahmood and Khansari, Mohamm}, title = {Critical Success Factors for Implementing PACS Technology in Iran's Hospitals}, abstract = {This study clarifies the critical success factors (CSFs) that affect the adoption and implementation of PACS and its applications in Iranian hospitals. We identified CSFs through a literature review and expert interviews, and then examined their importance with a t-test of 110 respondents. The Kaiser-Meyer test and Varimax rotation are used to assess the validity of the data, and factor analysis is used for clustering. The results are examined in 11 hospitals that have implemented PACS. Twenty of the 23 CSFs are found to be important by the t-test and are clustered into six groups by factor analysis. The most important CSFs are (1) the ability to choose and purchase the appropriate PACS and (2) being patient-centered and paying attention to patient satisfaction. 77% of the questionnaires were completed with less than 2% missing data. The results are confirmed in 11 hospitals in Iran. This paper fulfils an identified need to study how PACS can be adopted in Iran's hospitals by determining six CSFs. These can be applicable for policy makers and managers of other hospitals in Iran and in other developing countries that wish to use PACS as an integrated IT technology.}, Keywords = {PACS, Cloud computing, Futures trends, CSF, decision makers}, volume = {9}, Number = {2}, pages = {45-52}, publisher = {ICT Research Institute (ITRC)}, title_fa = {}, abstract_fa = {}, keywords_fa = {}, url = {http://ijict.itrc.ac.ir/article-1-41-en.html}, eprint = {http://ijict.itrc.ac.ir/article-1-41-en.pdf}, journal = {International Journal of Information and Communication Technology Research}, issn = {2251-6107}, eissn = {2783-4425}, year = {2017} }
@article{ author = {Sheikhan, Mansour and Bostani, Hami}, title = {A Security Mechanism for Detecting Intrusions in Internet of Things Using Selected Features Based on MI-BGSA}, abstract = {The Internet of Things (IoT) is a novel emerging approach in computer networks wherein all the heterogeneous objects around us, which are usually resource-constrained, can connect to each other and to the Internet by using a broad range of technologies. IoT is a hybrid network whose main components include the Internet and wireless sensor networks (WSNs); therefore, implementing security mechanisms in IoT seems necessary. This paper introduces a novel intrusion detection architecture model for IoT that provides the possibility of distributed detection. The proposed hybrid model uses anomaly and misuse intrusion detection agents based on supervised and unsupervised optimum-path forest models to provide the ability to detect internal and external attacks simultaneously. The number of input features to the proposed classifier is reduced by a hybrid feature selection algorithm as well.
The experimental results of simulated scenarios show the superior performance of the proposed security mechanism in multi-faceted detection.}, Keywords = {Internet of things, intrusion detection, anomaly-based, misuse-based, optimum-path forest}, volume = {9}, Number = {2}, pages = {53-62}, publisher = {ICT Research Institute (ITRC)}, title_fa = {}, abstract_fa = {}, keywords_fa = {}, url = {http://ijict.itrc.ac.ir/article-1-42-en.html}, eprint = {http://ijict.itrc.ac.ir/article-1-42-en.pdf}, journal = {International Journal of Information and Communication Technology Research}, issn = {2251-6107}, eissn = {2783-4425}, year = {2017} }
@article{ author = {Hosseini, Elahe sadat and Berangi, Rez}, title = {A Sub-Optimal Policy for Connection Admission Control Mechanism in Cognitive Radio Sensor Networks}, abstract = {Satisfying quality of service (QoS) requirements is a crucial issue in cognitive radio sensor networks (CRSNs) due to the highly variable nature of cognitive radio channels. Connection admission control (CAC) is a beneficial approach for managing traffic to provide the desired QoS. A CAC is proposed in this paper to optimize the packet loss ratio, packet jitter and end-to-end delay in CRSNs. The proposed CAC decides based on the priority of data flows, the network state and the number of available channels. An estimation formula is proposed through a graph coloring approach to evaluate the required number of channels for the network states. The proposed CAC is modeled by a semi-Markov decision process (SMDP), and a sub-optimal policy is obtained by a value iteration method to achieve the maximum reward in the network. Simulation results demonstrate that the proposed mechanism outperforms a recently proposed admission control mechanism in CRSNs.}, Keywords = {Cognitive radio sensor networks, admission control, QoS, semi Markov decision process (SMDP)}, volume = {9}, Number = {1}, pages = {1-7}, publisher = {ICT Research Institute (ITRC)}, title_fa = {}, abstract_fa = {}, keywords_fa = {}, url = {http://ijict.itrc.ac.ir/article-1-43-en.html}, eprint = {http://ijict.itrc.ac.ir/article-1-43-en.pdf}, journal = {International Journal of Information and Communication Technology Research}, issn = {2251-6107}, eissn = {2783-4425}, year = {2017} }
@article{ author = {Karimi, Meysam and Babamir, Seyed Mortez}, title = {QoS-aware Web Service Composition Using Gray Wolf Optimizer}, abstract = {In a service-oriented application, an integrated model of web services is composed of multiple abstract tasks. Each abstract task denotes a certain functionality that could be executed by a number of candidate web services with different qualities. Selecting a web service among the candidates for each task, such that the composition of the selected web services is optimal, is an NP-hard problem. In this paper, we adapt the Gray Wolf Optimizer (GWO) algorithm for the selection of candidate web services whose composition is optimal. To evaluate the effectiveness of the proposed method, four quality parameters of web services, namely response time, reliability, availability and cost, are considered, and the derived results are compared with several Particle Swarm Optimization (PSO) methods.
The proposed method was executed from 100 to 1000 times, and the results showed a better optimality rate (between 0.2 and 0.4) compared with PSO.}, Keywords = {Optimal web service composition, Gray Wolf Optimizer algorithm, Particle Swarm Optimization, Service oriented, Quality of service}, volume = {9}, Number = {1}, pages = {9-16}, publisher = {ICT Research Institute (ITRC)}, title_fa = {}, abstract_fa = {}, keywords_fa = {}, url = {http://ijict.itrc.ac.ir/article-1-44-en.html}, eprint = {http://ijict.itrc.ac.ir/article-1-44-en.pdf}, journal = {International Journal of Information and Communication Technology Research}, issn = {2251-6107}, eissn = {2783-4425}, year = {2017} }
@article{ author = {Behkamal, Bahareh and Naghibzadeh, Mahmou}, title = {Inconsistency Repair to Improve the Alignment Results of Ontology Matchers}, abstract = {Ontology inconsistency is one of the most important topics in the field of ontology matching. Many matchers have been introduced so far, but most of them suffer from inconsistencies. Many ontology matching tools have severe problems with respect to the quality of matching results, and therefore the results of the matching process are not adequate. In this paper, we focus on this topic and present a new method to produce better results from the matching process. The major novelty of this paper is in detecting the inconsistencies in ontologies before starting the matching process. In this phase, many problems caused by ontology diversity are resolved. Besides, some new patterns and inconsistencies in ontologies are detected and refactoring operations are then applied to them. In the end, one of the well-known matchers in OAEI is selected to evaluate our work. Experimental results show that the transformed ontologies are more efficient than the original unrepaired ones with respect to the standard evaluation measures.}, Keywords = {Ontology matching, Alignment, Inconsistency, Refactoring, Pattern detection}, volume = {9}, Number = {1}, pages = {17-23}, publisher = {ICT Research Institute (ITRC)}, title_fa = {}, abstract_fa = {}, keywords_fa = {}, url = {http://ijict.itrc.ac.ir/article-1-45-en.html}, eprint = {http://ijict.itrc.ac.ir/article-1-45-en.pdf}, journal = {International Journal of Information and Communication Technology Research}, issn = {2251-6107}, eissn = {2783-4425}, year = {2017} }
@article{ author = {Alishirvani, Nasri}, title = {A Framework to Create a Certificate for e-Commerce Secure Transaction Protocol}, abstract = {The development of e-commerce requires security to win the confidence of its stakeholders. Among the common protocols for establishing safe financial transactions, the Secure Electronic Transaction (SET) protocol offers more security, providing safe payments at the level of the communication network between buyers, sellers, banks and payment gateways. In this protocol, all participants in the transaction should receive a Certificate Authority (CA) identity. This paper analyzes secure communication solutions and a variety of secure communication contracts for financial transactions. Then, an architecture is presented to establish a web-based Certificate Authority (CA) identity for the elements of the Secure Electronic Transaction (SET) protocol, and its implementation is described.
The Certificate Authority (CA) presented in this article can process entities' requests online and transmit them through the Hypertext Transfer Protocol (HTTP).}, Keywords = {Secure Transaction Protocol, Certificate Authority, Secure Electronic Transaction, Payment Gateway, Public Key Infrastructure}, volume = {9}, Number = {1}, pages = {25-32}, publisher = {ICT Research Institute (ITRC)}, title_fa = {}, abstract_fa = {}, keywords_fa = {}, url = {http://ijict.itrc.ac.ir/article-1-46-en.html}, eprint = {http://ijict.itrc.ac.ir/article-1-46-en.pdf}, journal = {International Journal of Information and Communication Technology Research}, issn = {2251-6107}, eissn = {2783-4425}, year = {2017} }
@article{ author = {AshrafiPayaman, Nosratali and Kangavari, Mohammadrez}, title = {GSSC: Graph Summarization based on both Structure and Concepts}, abstract = {In this paper, we propose a new method for graph summarization named GSSC, Graph Summarization based on both Structure and Concepts. In this method, an attributed graph is summarized by considering both its topology and its related concepts. For a given attributed graph, a new graph is constructed in which an edge represents the structural and conceptual similarity of its two endpoints. The structural and conceptual similarities of two nodes do not necessarily have equal importance in the weight of the resulting edge. For example, for a special case such as query answering, structure can be more important, and vice versa. The similarity of two nodes is computed based on the Jaccard similarity. This method has advantages such as flexibility, simplicity, learning capability and user-orientation, which make it a better method for graph summarization. We implemented our method and the method proposed by Bei and evaluated the two methods on the real-life dataset HEP_TH. Our experimental results showed the effectiveness and efficiency of the proposed method.}, Keywords = {Graph summarization, super-node, similarity, conceptual summarization, summary}, volume = {9}, Number = {1}, pages = {33-44}, publisher = {ICT Research Institute (ITRC)}, title_fa = {}, abstract_fa = {}, keywords_fa = {}, url = {http://ijict.itrc.ac.ir/article-1-47-en.html}, eprint = {http://ijict.itrc.ac.ir/article-1-47-en.pdf}, journal = {International Journal of Information and Communication Technology Research}, issn = {2251-6107}, eissn = {2783-4425}, year = {2017} }
@article{ author = {Cheraghchi, Hamideh Sadat and Zakerolhosseini, Ali}, title = {Mining Dynamic Communities based on a Novel Link-Clustering Algorithm}, abstract = {Discovering communities in time-varying social networks is a highly challenging area of research, and researchers are encouraged to propose new models for this domain. The issue is more problematic when the overlapping structure of communities is to be considered. In this research, we present a new online and incremental community detection algorithm called link-clustering, which uses the link-based clustering paradigm intertwined with a novel representative-based algorithm to handle these issues. The algorithm works in both weighted and binary networks and intrinsically allows for overlapping communities.
Comparison with state-of-the-art evolutionary algorithms and link-based clustering shows the accuracy of this method in detecting communities over time and motivates extended research in the link-based clustering paradigm for the purpose of dynamic overlapping community detection.}, Keywords = {social network, link clustering, dynamic network, evolutionary clustering, representative-based clustering}, volume = {9}, Number = {1}, pages = {45-51}, publisher = {ICT Research Institute (ITRC)}, title_fa = {}, abstract_fa = {}, keywords_fa = {}, url = {http://ijict.itrc.ac.ir/article-1-48-en.html}, eprint = {http://ijict.itrc.ac.ir/article-1-48-en.pdf}, journal = {International Journal of Information and Communication Technology Research}, issn = {2251-6107}, eissn = {2783-4425}, year = {2017} }
@article{ author = {Mosharraf, Maedeh and Taghiyareh, Fattaneh}, title = {Pruning Concept Map to Generate Ontology}, abstract = {Knowledge representation in the form of a concept map can be a good way to categorize domain terms and their relations and to help generate an ontology. Adding detailed information to, and pruning useless data from, the concept map, which acts like a skeleton in the evolving ontology, can be semantically accomplished using domain knowledge. In this paper, we propose a method that uses structural knowledge resources as well as the tacit knowledge of experts to generate the ontology of the eLearning domain. The concept map of eLearning is manually improved and finally verified by a group of eLearning experts. In order to enrich the ontology by merging in upcoming terms, the paper proposes an automatic method based on two external knowledge sources, Wikipedia and WordNet. The semantic similarity of concepts, measured using the word hierarchy of WordNet combined with concept relations extracted from the Wikipedia graph, is applied to link new eLearning concepts to the domain ontology. The generated ontology is a dynamic knowledge source which can improve itself gradually. This integrated knowledge of the eLearning domain can be used to model educational activities and to build, organize, and update specific learning resources.}, Keywords = {concept map, pruning, ontology generation, ontology enrichment, elearning, graph clustering, Wikipedia, WordNet}, volume = {9}, Number = {1}, pages = {53-61}, publisher = {ICT Research Institute (ITRC)}, title_fa = {}, abstract_fa = {}, keywords_fa = {}, url = {http://ijict.itrc.ac.ir/article-1-49-en.html}, eprint = {http://ijict.itrc.ac.ir/article-1-49-en.pdf}, journal = {International Journal of Information and Communication Technology Research}, issn = {2251-6107}, eissn = {2783-4425}, year = {2017} }